diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 40d2cc127690e5..fae5138435665e 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -26,8 +26,8 @@ Objects/dict* @methane
# Ignoring importlib.h so as to not get flagged on
# all pull requests that change the emitted
# bytecode.
-**/*import*.c @python/import-team
-**/*import*.py @python/import-team
+**/*import*.c @brettcannon @encukou @ericsnowcurrently @ncoghlan @warsaw
+**/*import*.py @brettcannon @encukou @ericsnowcurrently @ncoghlan @warsaw
# SSL
@@ -43,6 +43,12 @@ Python/bootstrap_hash.c @python/crypto-team
**/*imap* @python/email-team
**/*poplib* @python/email-team
+# Parser/Pgen
+/Parser/pgen/ @pablogsal
+
+# SQLite 3
+**/*sqlite* @berkerpeksag
+
# subprocess
**/*subprocess* @gpshead
@@ -50,6 +56,9 @@ Python/bootstrap_hash.c @python/crypto-team
/PC/ @python/windows-team
/PCbuild/ @python/windows-team
+# Urllib
+**/*robotparser* @berkerpeksag
+
# Windows installer packages
/Tools/msi/ @python/windows-team
/Tools/nuget/ @python/windows-team
diff --git a/Doc/Makefile b/Doc/Makefile
index 53877e61329089..cf1bb88b0b8ea4 100644
--- a/Doc/Makefile
+++ b/Doc/Makefile
@@ -48,11 +48,19 @@ build:
@if [ -f ../Misc/NEWS ] ; then \
echo "Using existing Misc/NEWS file"; \
cp ../Misc/NEWS build/NEWS; \
- elif [ -d ../Misc/NEWS.d ]; then \
- echo "Building NEWS from Misc/NEWS.d with blurb"; \
- $(BLURB) merge -f build/NEWS; \
+ elif $(BLURB) help >/dev/null 2>&1 && $(SPHINXBUILD) --version >/dev/null 2>&1; then \
+ if [ -d ../Misc/NEWS.d ]; then \
+ echo "Building NEWS from Misc/NEWS.d with blurb"; \
+ $(BLURB) merge -f build/NEWS; \
+ else \
+ echo "Neither Misc/NEWS.d nor Misc/NEWS found; cannot build docs"; \
+ exit 1; \
+ fi; \
else \
- echo "Neither Misc/NEWS.d nor Misc/NEWS found; cannot build docs"; \
+ echo ""; \
+ echo "Missing the required blurb or sphinx-build tools."; \
+ echo "Please run 'make venv' to install local copies."; \
+ echo ""; \
exit 1; \
fi
$(SPHINXBUILD) $(ALLSPHINXOPTS)
diff --git a/Doc/c-api/datetime.rst b/Doc/c-api/datetime.rst
index 78724619ea3c52..b7949e235005c8 100644
--- a/Doc/c-api/datetime.rst
+++ b/Doc/c-api/datetime.rst
@@ -98,6 +98,22 @@ Macros to create objects:
minute, second and microsecond.
+.. c:function:: PyObject* PyDateTime_FromDateAndTimeAndFold(int year, int month, int day, int hour, int minute, int second, int usecond, int fold)
+
+ Return a :class:`datetime.datetime` object with the specified year, month, day, hour,
+ minute, second, microsecond and fold.
+
+ .. versionadded:: 3.6
+
+
+.. c:function:: PyObject* PyTime_FromTimeAndFold(int hour, int minute, int second, int usecond, int fold)
+
+ Return a :class:`datetime.time` object with the specified hour, minute, second,
+ microsecond and fold.
+
+ .. versionadded:: 3.6
+
+
.. c:function:: PyObject* PyTime_FromTime(int hour, int minute, int second, int usecond)
Return a :class:`datetime.time` object with the specified hour, minute, second and
diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst
index 8c5f66cbef722b..13f0aff1cf99b6 100644
--- a/Doc/c-api/exceptions.rst
+++ b/Doc/c-api/exceptions.rst
@@ -53,8 +53,8 @@ Printing and clearing
.. c:function:: void PyErr_PrintEx(int set_sys_last_vars)
Print a standard traceback to ``sys.stderr`` and clear the error indicator.
- **Unless** the error is a ``SystemExit``. In that case the no traceback
- is printed and Python process will exit with the error code specified by
+ **Unless** the error is a ``SystemExit``, in that case no traceback is
+ printed and the Python process will exit with the error code specified by
the ``SystemExit`` instance.
Call this function **only** when the error indicator is set. Otherwise it
diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst
index 7ef11228a33dcf..367c069a7ff4cd 100644
--- a/Doc/c-api/init.rst
+++ b/Doc/c-api/init.rst
@@ -1080,6 +1080,18 @@ All of the following functions must be called after :c:func:`Py_Initialize`.
*tstate*, which should not be *NULL*. The lock must have been created earlier.
If this thread already has the lock, deadlock ensues.
+ .. note::
+ Calling this function from a thread when the runtime is finalizing
+ will terminate the thread, even if the thread was not created by Python.
+ You can use :c:func:`_Py_IsFinalizing` or :func:`sys.is_finalizing` to
+ check if the interpreter is in the process of being finalized before calling
+ this function to avoid unwanted termination.
+
+ .. versionchanged:: 3.8
+ Updated to be consistent with :c:func:`PyEval_RestoreThread`,
+ :c:func:`Py_END_ALLOW_THREADS`, and :c:func:`PyGILState_Ensure`,
+ and terminate the current thread if called while the interpreter is finalizing.
+
:c:func:`PyEval_RestoreThread` is a higher-level function which is always
available (even when threads have not been initialized).
@@ -1106,6 +1118,18 @@ All of the following functions must be called after :c:func:`Py_Initialize`.
:c:func:`PyEval_RestoreThread` or :c:func:`PyEval_AcquireThread`
instead.
+ .. note::
+ Calling this function from a thread when the runtime is finalizing
+ will terminate the thread, even if the thread was not created by Python.
+ You can use :c:func:`_Py_IsFinalizing` or :func:`sys.is_finalizing` to
+ check if the interpreter is in the process of being finalized before calling
+ this function to avoid unwanted termination.
+
+ .. versionchanged:: 3.8
+ Updated to be consistent with :c:func:`PyEval_RestoreThread`,
+ :c:func:`Py_END_ALLOW_THREADS`, and :c:func:`PyGILState_Ensure`,
+ and terminate the current thread if called while the interpreter is finalizing.
+
.. c:function:: void PyEval_ReleaseLock()
diff --git a/Doc/distutils/apiref.rst b/Doc/distutils/apiref.rst
index a825efc1a67243..1facc0408d5b0f 100644
--- a/Doc/distutils/apiref.rst
+++ b/Doc/distutils/apiref.rst
@@ -277,6 +277,11 @@ the full reference.
| | simply skip the extension. | |
+------------------------+--------------------------------+---------------------------+
+ .. versionchanged:: 3.8
+
+ On Unix, C extensions are no longer linked to libpython except on
+ Android.
+
.. class:: Distribution
diff --git a/Doc/distutils/setupscript.rst b/Doc/distutils/setupscript.rst
index 54ed1aebc242d8..a65a26ac57facc 100644
--- a/Doc/distutils/setupscript.rst
+++ b/Doc/distutils/setupscript.rst
@@ -523,7 +523,7 @@ following way::
setup(...,
data_files=[('bitmaps', ['bm/b1.gif', 'bm/b2.gif']),
- ('config', ['cfg/data.cfg']),
+ ('config', ['cfg/data.cfg'])],
)
Each (*directory*, *files*) pair in the sequence specifies the installation
diff --git a/Doc/extending/newtypes_tutorial.rst b/Doc/extending/newtypes_tutorial.rst
index bb8a40d0fb06f5..b4bf9b9e6f75f8 100644
--- a/Doc/extending/newtypes_tutorial.rst
+++ b/Doc/extending/newtypes_tutorial.rst
@@ -92,6 +92,7 @@ The second bit is the definition of the type object. ::
.tp_doc = "Custom objects",
.tp_basicsize = sizeof(CustomObject),
.tp_itemsize = 0,
+ .tp_flags = Py_TPFLAGS_DEFAULT,
.tp_new = PyType_GenericNew,
};
diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst
index 31614189a62d2c..f14e8cc824ef74 100644
--- a/Doc/faq/programming.rst
+++ b/Doc/faq/programming.rst
@@ -16,6 +16,9 @@ Is there a source code level debugger with breakpoints, single-stepping, etc.?
Yes.
+Several debuggers for Python are described below, and the built-in function
+:func:`breakpoint` allows you to drop into any of them.
+
The pdb module is a simple but adequate console-mode debugger for Python. It is
part of the standard Python library, and is :mod:`documented in the Library
Reference Manual <pdb>`. You can also write your own debugger by using the code
diff --git a/Doc/howto/regex.rst b/Doc/howto/regex.rst
index d385d991344b28..d574c3736b1cb7 100644
--- a/Doc/howto/regex.rst
+++ b/Doc/howto/regex.rst
@@ -942,6 +942,13 @@ given numbers, so you can retrieve information about a group in two ways::
>>> m.group(1)
'Lots'
+Additionally, you can retrieve named groups as a dictionary with
+:meth:`~re.Match.groupdict`::
+
+ >>> m = re.match(r'(?P<first>\w+) (?P<last>\w+)', 'Jane Doe')
+ >>> m.groupdict()
+ {'first': 'Jane', 'last': 'Doe'}
+
Named groups are handy because they let you use easily-remembered names, instead
of having to remember numbers. Here's an example RE from the :mod:`imaplib`
module::
diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst
index 8d157fd5f599ae..06f673be7902c2 100644
--- a/Doc/library/asyncio-eventloop.rst
+++ b/Doc/library/asyncio-eventloop.rst
@@ -281,9 +281,9 @@ clocks to track time.
the event loop's internal monotonic clock.
.. note::
-
- Timeouts (relative *delay* or absolute *when*) should not
- exceed one day.
+ .. versionchanged:: 3.8
+ In Python 3.7 and earlier timeouts (relative *delay* or absolute *when*)
+ should not exceed one day. This has been fixed in Python 3.8.
.. seealso::
@@ -397,9 +397,27 @@ Opening network connections
If given, these should all be integers from the corresponding
:mod:`socket` module constants.
+ * *happy_eyeballs_delay*, if given, enables Happy Eyeballs for this
+ connection. It should
+ be a floating-point number representing the amount of time in seconds
+ to wait for a connection attempt to complete, before starting the next
+ attempt in parallel. This is the "Connection Attempt Delay" as defined
+ in :rfc:`8305`. A sensible default value recommended by the RFC is ``0.25``
+ (250 milliseconds).
+
+ * *interleave* controls address reordering when a host name resolves to
+ multiple IP addresses.
+ If ``0`` or unspecified, no reordering is done, and addresses are
+ tried in the order returned by :meth:`getaddrinfo`. If a positive integer
+ is specified, the addresses are interleaved by address family, and the
+ given integer is interpreted as "First Address Family Count" as defined
+ in :rfc:`8305`. The default is ``0`` if *happy_eyeballs_delay* is not
+ specified, and ``1`` if it is.
+
* *sock*, if given, should be an existing, already connected
:class:`socket.socket` object to be used by the transport.
- If *sock* is given, none of *host*, *port*, *family*, *proto*, *flags*
+ If *sock* is given, none of *host*, *port*, *family*, *proto*, *flags*,
+ *happy_eyeballs_delay*, *interleave*
and *local_addr* should be specified.
* *local_addr*, if given, is a ``(local_host, local_port)`` tuple used
@@ -410,6 +428,10 @@ Opening network connections
to wait for the TLS handshake to complete before aborting the connection.
``60.0`` seconds if ``None`` (default).
+ .. versionadded:: 3.8
+
+ The *happy_eyeballs_delay* and *interleave* parameters.
+
.. versionadded:: 3.7
The *ssl_handshake_timeout* parameter.
@@ -1601,7 +1623,7 @@ using the :meth:`loop.add_signal_handler` method::
import os
import signal
- def ask_exit(signame):
+ def ask_exit(signame, loop):
print("got signal %s: exit" % signame)
loop.stop()
@@ -1611,7 +1633,7 @@ using the :meth:`loop.add_signal_handler` method::
for signame in {'SIGINT', 'SIGTERM'}:
loop.add_signal_handler(
getattr(signal, signame),
- functools.partial(ask_exit, signame))
+ functools.partial(ask_exit, signame, loop))
await asyncio.sleep(3600)
diff --git a/Doc/library/asyncio-sync.rst b/Doc/library/asyncio-sync.rst
index 18da18873dbfb5..e3f18ccb4341fe 100644
--- a/Doc/library/asyncio-sync.rst
+++ b/Doc/library/asyncio-sync.rst
@@ -17,7 +17,7 @@ those of the :mod:`threading` module with two important caveats:
argument; use the :func:`asyncio.wait_for` function to perform
operations with timeouts.
-asyncio has the following basic sychronization primitives:
+asyncio has the following basic synchronization primitives:
* :class:`Lock`
* :class:`Event`
diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst
index 1ee23c2175a27d..abdc977354803e 100644
--- a/Doc/library/datetime.rst
+++ b/Doc/library/datetime.rst
@@ -458,6 +458,13 @@ Other constructors, all class methods:
.. versionadded:: 3.7
+.. classmethod:: date.fromisocalendar(year, week, day)
+
+ Return a :class:`date` corresponding to the ISO calendar date specified by
+ year, week and day. This is the inverse of the function :meth:`date.isocalendar`.
+
+ .. versionadded:: 3.8
+
Class attributes:
@@ -854,6 +861,16 @@ Other constructors, all class methods:
.. versionadded:: 3.7
+
+.. classmethod:: datetime.fromisocalendar(year, week, day)
+
+ Return a :class:`datetime` corresponding to the ISO calendar date specified
+ by year, week and day. The non-date components of the datetime are populated
+ with their normal default values. This is the inverse of the function
+ :meth:`datetime.isocalendar`.
+
+ .. versionadded:: 3.8
+
.. classmethod:: datetime.strptime(date_string, format)
Return a :class:`.datetime` corresponding to *date_string*, parsed according to
diff --git a/Doc/library/email.generator.rst b/Doc/library/email.generator.rst
index fc535a3e4399ff..c09ae8cbc60410 100644
--- a/Doc/library/email.generator.rst
+++ b/Doc/library/email.generator.rst
@@ -188,7 +188,7 @@ to be using :class:`BytesGenerator`, and not :class:`Generator`.
(This is required because strings cannot represent non-ASCII bytes.)
Convert any bytes with the high bit set as needed using an
ASCII-compatible :mailheader:`Content-Transfer-Encoding`. That is,
- transform parts with non-ASCII :mailheader:`Cotnent-Transfer-Encoding`
+ transform parts with non-ASCII :mailheader:`Content-Transfer-Encoding`
(:mailheader:`Content-Transfer-Encoding: 8bit`) to an ASCII compatible
:mailheader:`Content-Transfer-Encoding`, and encode RFC-invalid non-ASCII
bytes in headers using the MIME ``unknown-8bit`` character set, thus
diff --git a/Doc/library/http.cookies.rst b/Doc/library/http.cookies.rst
index f3457a0cdc7bc4..17792b200599bd 100644
--- a/Doc/library/http.cookies.rst
+++ b/Doc/library/http.cookies.rst
@@ -55,8 +55,9 @@ in Cookie name (as :attr:`~Morsel.key`).
.. class:: SimpleCookie([input])
This class derives from :class:`BaseCookie` and overrides :meth:`value_decode`
- and :meth:`value_encode` to be the identity and :func:`str` respectively.
-
+ and :meth:`value_encode`. SimpleCookie supports strings as cookie values.
+ When setting the value, SimpleCookie calls the builtin :func:`str()` to convert
+ the value to a string. Values received from HTTP are kept as strings.
.. seealso::
@@ -76,15 +77,16 @@ Cookie Objects
.. method:: BaseCookie.value_decode(val)
- Return a decoded value from a string representation. Return value can be any
- type. This method does nothing in :class:`BaseCookie` --- it exists so it can be
- overridden.
+ Return a tuple ``(real_value, coded_value)`` from a string representation.
+ ``real_value`` can be any type. This method does no decoding in
+ :class:`BaseCookie` --- it exists so it can be overridden.
.. method:: BaseCookie.value_encode(val)
- Return an encoded value. *val* can be any type, but return value must be a
- string. This method does nothing in :class:`BaseCookie` --- it exists so it can
+ Return a tuple ``(real_value, coded_value)``. *val* can be any type, but
+ ``coded_value`` will always be converted to a string.
+ This method does no encoding in :class:`BaseCookie` --- it exists so it can
be overridden.
In general, it should be the case that :meth:`value_encode` and
diff --git a/Doc/library/idle.rst b/Doc/library/idle.rst
index 11e137bf1092ed..ad449112832671 100644
--- a/Doc/library/idle.rst
+++ b/Doc/library/idle.rst
@@ -724,11 +724,9 @@ to begin after the next tab stop. (They occur every 8 'characters'). Newline
characters cause following text to appear on a new line. Other control
characters are ignored or displayed as a space, box, or something else,
depending on the operating system and font. (Moving the text cursor through
-such output with arrow keys may exhibit some surprising spacing behavior.)
+such output with arrow keys may exhibit some surprising spacing behavior.) ::
-.. code-block:: none
-
- >>> s = 'a\tb\a<\x02><\r>\bc\nd'
+ >>> s = 'a\tb\a<\x02><\r>\bc\nd' # Enter 22 chars.
>>> len(s)
14
>>> s # Display repr(s)
diff --git a/Doc/library/imaplib.rst b/Doc/library/imaplib.rst
index d0709f8b678e43..f027f82ddebe61 100644
--- a/Doc/library/imaplib.rst
+++ b/Doc/library/imaplib.rst
@@ -327,6 +327,9 @@ An :class:`IMAP4` instance has the following methods:
Shutdown connection to server. Returns server ``BYE`` response.
+ .. versionchanged:: 3.8
+ The method no longer ignores silently arbitrary exceptions.
+
.. method:: IMAP4.lsub(directory='""', pattern='*')
diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst
index 81824ddc1e54db..d12f122a57b599 100644
--- a/Doc/library/inspect.rst
+++ b/Doc/library/inspect.rst
@@ -948,6 +948,11 @@ Classes and functions
APIs. This function is retained primarily for use in code that needs to
maintain compatibility with the Python 2 ``inspect`` module API.
+ .. deprecated:: 3.8
+ Use :func:`signature` and
+ :ref:`Signature Object <inspect-signature-object>`, which provide a
+ better introspecting API for callables.
+
.. versionchanged:: 3.4
This function is now based on :func:`signature`, but still ignores
``__wrapped__`` attributes and includes the already bound first
diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index 3d4e5836cf205e..b3a0a5f5192da1 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -827,7 +827,7 @@ which incur interpreter overhead.
"List unique elements, preserving order. Remember only the element just seen."
# unique_justseen('AAAABBBCCDAABBB') --> A B C D A B
# unique_justseen('ABBCcAD', str.lower) --> A B C A D
- return map(next, map(itemgetter(1), groupby(iterable, key)))
+ return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
def iter_except(func, exception, first=None):
""" Call a function repeatedly until an exception is raised.
diff --git a/Doc/library/os.rst b/Doc/library/os.rst
index f3b5d964ac58a8..e77a8fed377ad6 100644
--- a/Doc/library/os.rst
+++ b/Doc/library/os.rst
@@ -1453,16 +1453,19 @@ features:
.. _path_fd:
* **specifying a file descriptor:**
- For some functions, the *path* argument can be not only a string giving a path
- name, but also a file descriptor. The function will then operate on the file
- referred to by the descriptor. (For POSIX systems, Python will call the
- ``f...`` version of the function.)
-
- You can check whether or not *path* can be specified as a file descriptor on
- your platform using :data:`os.supports_fd`. If it is unavailable, using it
- will raise a :exc:`NotImplementedError`.
+ Normally the *path* argument provided to functions in the :mod:`os` module
+ must be a string specifying a file path. However, some functions now
+ alternatively accept an open file descriptor for their *path* argument.
+ The function will then operate on the file referred to by the descriptor.
+ (For POSIX systems, Python will call the variant of the function prefixed
+ with ``f`` (e.g. call ``fchdir`` instead of ``chdir``).)
+
+ You can check whether or not *path* can be specified as a file descriptor
+ for a particular function on your platform using :data:`os.supports_fd`.
+ If this functionality is unavailable, using it will raise a
+ :exc:`NotImplementedError`.
- If the function also supports *dir_fd* or *follow_symlinks* arguments, it is
+ If the function also supports *dir_fd* or *follow_symlinks* arguments, it's
an error to specify one of those when supplying *path* as a file descriptor.
.. _dir_fd:
@@ -1471,23 +1474,24 @@ features:
should be a file descriptor referring to a directory, and the path to operate
on should be relative; path will then be relative to that directory. If the
path is absolute, *dir_fd* is ignored. (For POSIX systems, Python will call
- the ``...at`` or ``f...at`` version of the function.)
+ the variant of the function with an ``at`` suffix and possibly prefixed with
+ ``f`` (e.g. call ``faccessat`` instead of ``access``).
- You can check whether or not *dir_fd* is supported on your platform using
- :data:`os.supports_dir_fd`. If it is unavailable, using it will raise a
- :exc:`NotImplementedError`.
+ You can check whether or not *dir_fd* is supported for a particular function
+ on your platform using :data:`os.supports_dir_fd`. If it's unavailable,
+ using it will raise a :exc:`NotImplementedError`.
.. _follow_symlinks:
* **not following symlinks:** If *follow_symlinks* is
``False``, and the last element of the path to operate on is a symbolic link,
- the function will operate on the symbolic link itself instead of the file the
- link points to. (For POSIX systems, Python will call the ``l...`` version of
- the function.)
+ the function will operate on the symbolic link itself rather than the file
+ pointed to by the link. (For POSIX systems, Python will call the ``l...``
+ variant of the function.)
- You can check whether or not *follow_symlinks* is supported on your platform
- using :data:`os.supports_follow_symlinks`. If it is unavailable, using it
- will raise a :exc:`NotImplementedError`.
+ You can check whether or not *follow_symlinks* is supported for a particular
+ function on your platform using :data:`os.supports_follow_symlinks`.
+ If it's unavailable, using it will raise a :exc:`NotImplementedError`.
@@ -1662,7 +1666,7 @@ features:
.. availability:: Unix.
.. versionadded:: 3.3
- Added support for specifying an open file descriptor for *path*,
+ Added support for specifying *path* as an open file descriptor,
and the *dir_fd* and *follow_symlinks* arguments.
.. versionchanged:: 3.6
@@ -1781,7 +1785,7 @@ features:
The *path* parameter became optional.
.. versionadded:: 3.3
- Added support for specifying an open file descriptor for *path*.
+ Added support for specifying *path* as an open file descriptor.
.. versionchanged:: 3.6
Accepts a :term:`path-like object`.
@@ -2593,7 +2597,7 @@ features:
The :const:`ST_RDONLY` and :const:`ST_NOSUID` constants were added.
.. versionadded:: 3.3
- Added support for specifying an open file descriptor for *path*.
+ Added support for specifying *path* as an open file descriptor.
.. versionchanged:: 3.4
The :const:`ST_NODEV`, :const:`ST_NOEXEC`, :const:`ST_SYNCHRONOUS`,
@@ -2610,59 +2614,61 @@ features:
.. data:: supports_dir_fd
- A :class:`~collections.abc.Set` object indicating which functions in the
- :mod:`os` module permit use of their *dir_fd* parameter. Different platforms
- provide different functionality, and an option that might work on one might
- be unsupported on another. For consistency's sakes, functions that support
- *dir_fd* always allow specifying the parameter, but will raise an exception
- if the functionality is not actually available.
-
- To check whether a particular function permits use of its *dir_fd*
- parameter, use the ``in`` operator on ``supports_dir_fd``. As an example,
- this expression determines whether the *dir_fd* parameter of :func:`os.stat`
- is locally available::
+ A :class:`set` object indicating which functions in the :mod:`os`
+ module accept an open file descriptor for their *dir_fd* parameter.
+ Different platforms provide different features, and the underlying
+ functionality Python uses to implement the *dir_fd* parameter is not
+ available on all platforms Python supports. For consistency's sake,
+ functions that may support *dir_fd* always allow specifying the
+ parameter, but will throw an exception if the functionality is used
+ when it's not locally available. (Specifying ``None`` for *dir_fd*
+ is always supported on all platforms.)
+
+ To check whether a particular function accepts an open file descriptor
+ for its *dir_fd* parameter, use the ``in`` operator on ``supports_dir_fd``.
+ As an example, this expression evaluates to ``True`` if :func:`os.stat`
+ accepts open file descriptors for *dir_fd* on the local platform::
os.stat in os.supports_dir_fd
- Currently *dir_fd* parameters only work on Unix platforms; none of them work
- on Windows.
+ Currently *dir_fd* parameters only work on Unix platforms;
+ none of them work on Windows.
.. versionadded:: 3.3
.. data:: supports_effective_ids
- A :class:`~collections.abc.Set` object indicating which functions in the
- :mod:`os` module permit use of the *effective_ids* parameter for
- :func:`os.access`. If the local platform supports it, the collection will
- contain :func:`os.access`, otherwise it will be empty.
+ A :class:`set` object indicating whether :func:`os.access` permits
+ specifying ``True`` for its *effective_ids* parameter on the local platform.
+ (Specifying ``False`` for *effective_ids* is always supported on all
+ platforms.) If the local platform supports it, the collection will contain
+ :func:`os.access`; otherwise it will be empty.
- To check whether you can use the *effective_ids* parameter for
- :func:`os.access`, use the ``in`` operator on ``supports_effective_ids``,
- like so::
+ This expression evaluates to ``True`` if :func:`os.access` supports
+ ``effective_ids=True`` on the local platform::
os.access in os.supports_effective_ids
- Currently *effective_ids* only works on Unix platforms; it does not work on
- Windows.
+ Currently *effective_ids* is only supported on Unix platforms;
+ it does not work on Windows.
.. versionadded:: 3.3
.. data:: supports_fd
- A :class:`~collections.abc.Set` object indicating which functions in the
+ A :class:`set` object indicating which functions in the
:mod:`os` module permit specifying their *path* parameter as an open file
- descriptor. Different platforms provide different functionality, and an
- option that might work on one might be unsupported on another. For
- consistency's sakes, functions that support *fd* always allow specifying
- the parameter, but will raise an exception if the functionality is not
- actually available.
+ descriptor on the local platform. Different platforms provide different
+ features, and the underlying functionality Python uses to accept open file
+ descriptors as *path* arguments is not available on all platforms Python
+ supports.
- To check whether a particular function permits specifying an open file
+ To determine whether a particular function permits specifying an open file
descriptor for its *path* parameter, use the ``in`` operator on
- ``supports_fd``. As an example, this expression determines whether
- :func:`os.chdir` accepts open file descriptors when called on your local
+ ``supports_fd``. As an example, this expression evaluates to ``True`` if
+ :func:`os.chdir` accepts open file descriptors for *path* on your local
platform::
os.chdir in os.supports_fd
@@ -2672,17 +2678,21 @@ features:
.. data:: supports_follow_symlinks
- A :class:`~collections.abc.Set` object indicating which functions in the
- :mod:`os` module permit use of their *follow_symlinks* parameter. Different
- platforms provide different functionality, and an option that might work on
- one might be unsupported on another. For consistency's sakes, functions that
- support *follow_symlinks* always allow specifying the parameter, but will
- raise an exception if the functionality is not actually available.
-
- To check whether a particular function permits use of its *follow_symlinks*
- parameter, use the ``in`` operator on ``supports_follow_symlinks``. As an
- example, this expression determines whether the *follow_symlinks* parameter
- of :func:`os.stat` is locally available::
+ A :class:`set` object indicating which functions in the :mod:`os` module
+ accept ``False`` for their *follow_symlinks* parameter on the local platform.
+ Different platforms provide different features, and the underlying
+ functionality Python uses to implement *follow_symlinks* is not available
+ on all platforms Python supports. For consistency's sake, functions that
+ may support *follow_symlinks* always allow specifying the parameter, but
+ will throw an exception if the functionality is used when it's not locally
+ available. (Specifying ``True`` for *follow_symlinks* is always supported
+ on all platforms.)
+
+ To check whether a particular function accepts ``False`` for its
+ *follow_symlinks* parameter, use the ``in`` operator on
+ ``supports_follow_symlinks``. As an example, this expression evaluates
+ to ``True`` if you may specify ``follow_symlinks=False`` when calling
+ :func:`os.stat` on the local platform::
os.stat in os.supports_follow_symlinks
@@ -2801,7 +2811,7 @@ features:
following symlinks <follow_symlinks>`.
.. versionadded:: 3.3
- Added support for specifying an open file descriptor for *path*,
+ Added support for specifying *path* as an open file descriptor,
and the *dir_fd*, *follow_symlinks*, and *ns* parameters.
.. versionchanged:: 3.6
@@ -3162,7 +3172,7 @@ to be ignored.
.. availability:: Unix, Windows.
.. versionadded:: 3.3
- Added support for specifying an open file descriptor for *path*
+ Added support for specifying *path* as an open file descriptor
for :func:`execve`.
.. versionchanged:: 3.6
diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst
index 450e8ff378a3a5..41aebc4f61c83c 100644
--- a/Doc/library/pathlib.rst
+++ b/Doc/library/pathlib.rst
@@ -976,7 +976,7 @@ call fails (for example because the path doesn't exist).
is raised.
.. versionadded:: 3.6
- The *strict* argument.
+ The *strict* argument (pre-3.6 behavior is strict).
.. method:: Path.rglob(pattern)
@@ -1054,6 +1054,13 @@ call fails (for example because the path doesn't exist).
use :func:`Path.rmdir` instead.
+.. method:: Path.link_to(target)
+
+ Create a hard link pointing to a path named *target*.
+
+ .. versionadded:: 3.8
+
+
.. method:: Path.write_bytes(data)
Open the file pointed to in bytes mode, write *data* to it, and close the
diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst
index 60c6089ad3ccb5..e07f9d613a0d21 100644
--- a/Doc/library/platform.rst
+++ b/Doc/library/platform.rst
@@ -216,6 +216,21 @@ Windows Platform
later (support for this was added in Python 2.6). It obviously
only runs on Win32 compatible platforms.
+.. function:: win32_edition()
+
+ Returns a string representing the current Windows edition. Possible
+ values include but are not limited to ``'Enterprise'``, ``'IoTUAP'``,
+ ``'ServerStandard'``, and ``'nanoserver'``.
+
+ .. versionadded:: 3.8
+
+.. function:: win32_is_iot()
+
+ Return ``True`` if the Windows edition returned by :func:`win32_edition`
+ is recognized as an IoT edition.
+
+ .. versionadded:: 3.8
+
Mac OS Platform
---------------
diff --git a/Doc/library/pyclbr.rst b/Doc/library/pyclbr.rst
index a70c8df6a7b119..b80a2faed9b424 100644
--- a/Doc/library/pyclbr.rst
+++ b/Doc/library/pyclbr.rst
@@ -44,7 +44,7 @@ modules.
.. versionadded:: 3.7
Descriptors for nested definitions. They are accessed through the
- new children attibute. Each has a new parent attribute.
+ new children attribute. Each has a new parent attribute.
The descriptors returned by these functions are instances of
Function and Class classes. Users are not expected to create instances
diff --git a/Doc/library/random.rst b/Doc/library/random.rst
index 79a7bddad49792..fcedba4dbc2052 100644
--- a/Doc/library/random.rst
+++ b/Doc/library/random.rst
@@ -310,6 +310,11 @@ be found in any statistics text.
Alternative Generator
---------------------
+.. class:: Random([seed])
+
+ Class that implements the default pseudo-random number generator used by the
+ :mod:`random` module.
+
.. class:: SystemRandom([seed])
Class that uses the :func:`os.urandom` function for generating random numbers
diff --git a/Doc/library/re.rst b/Doc/library/re.rst
index 4ac5dee1407110..5ef72b535ce8d4 100644
--- a/Doc/library/re.rst
+++ b/Doc/library/re.rst
@@ -908,6 +908,7 @@ form.
Unknown escapes in *repl* consisting of ``'\'`` and an ASCII letter
now are errors.
+ .. versionchanged:: 3.7
Empty matches for the pattern are replaced when adjacent to a previous
non-empty match.
diff --git a/Doc/library/resource.rst b/Doc/library/resource.rst
index 2ed15c13673668..3573da7ea2d716 100644
--- a/Doc/library/resource.rst
+++ b/Doc/library/resource.rst
@@ -76,6 +76,8 @@ this module for those platforms.
``setrlimit`` may also raise :exc:`error` if the underlying system call
fails.
+ VxWorks only supports setting :data:`RLIMIT_NOFILE`.
+
.. function:: prlimit(pid, resource[, limits])
Combines :func:`setrlimit` and :func:`getrlimit` in one function and
diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst
index ac6cad9aff8e97..01200b4df8803e 100644
--- a/Doc/library/signal.rst
+++ b/Doc/library/signal.rst
@@ -16,7 +16,8 @@ The :func:`signal.signal` function allows defining custom handlers to be
executed when a signal is received. A small number of default handlers are
installed: :const:`SIGPIPE` is ignored (so write errors on pipes and sockets
can be reported as ordinary Python exceptions) and :const:`SIGINT` is
-translated into a :exc:`KeyboardInterrupt` exception.
+translated into a :exc:`KeyboardInterrupt` exception if the parent process
+has not changed it.
A handler for a particular signal, once set, remains installed until it is
explicitly reset (Python emulates the BSD style interface regardless of the
diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst
index 6a441983f8884e..20f5724447164d 100644
--- a/Doc/library/ssl.rst
+++ b/Doc/library/ssl.rst
@@ -665,7 +665,7 @@ Constants
.. data:: PROTOCOL_SSLv23
- Alias for data:`PROTOCOL_TLS`.
+ Alias for :data:`PROTOCOL_TLS`.
.. deprecated:: 3.6
@@ -1821,7 +1821,7 @@ to speed up repeated connections from the same clients.
.. attribute:: SSLContext.sslsocket_class
- The return type of :meth:`SSLContext.wrap_sockets`, defaults to
+ The return type of :meth:`SSLContext.wrap_socket`, defaults to
:class:`SSLSocket`. The attribute can be overridden on instance of class
in order to return a custom subclass of :class:`SSLSocket`.
@@ -1831,7 +1831,7 @@ to speed up repeated connections from the same clients.
server_hostname=None, session=None)
Wrap the BIO objects *incoming* and *outgoing* and return an instance of
- attr:`SSLContext.sslobject_class` (default :class:`SSLObject`). The SSL
+ :attr:`SSLContext.sslobject_class` (default :class:`SSLObject`). The SSL
routines will read input data from the incoming BIO and write data to the
outgoing BIO.
diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst
index 8bb2bdf7b697de..fb7df4e7188a07 100644
--- a/Doc/library/statistics.rst
+++ b/Doc/library/statistics.rst
@@ -48,6 +48,7 @@ or sample.
:func:`median_grouped` Median, or 50th percentile, of grouped data.
:func:`mode` Single mode (most common value) of discrete or nominal data.
:func:`multimode` List of modes (most common values) of discrete or nomimal data.
+:func:`quantiles` Divide data into intervals with equal probability.
======================= ===============================================================
Measures of spread
@@ -499,6 +500,53 @@ However, for reading convenience, most of the examples show sorted sequences.
:func:`pvariance` function as the *mu* parameter to get the variance of a
sample.
+.. function:: quantiles(dist, *, n=4, method='exclusive')
+
+ Divide *dist* into *n* continuous intervals with equal probability.
+ Returns a list of ``n - 1`` cut points separating the intervals.
+
+ Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles. Set
+ *n* to 100 for percentiles which gives the 99 cut points that separate
+ *dist* into 100 equal-sized groups. Raises :exc:`StatisticsError` if *n*
+ is not at least 1.
+
+ The *dist* can be any iterable containing sample data or it can be an
+ instance of a class that defines an :meth:`~inv_cdf` method.
+ Raises :exc:`StatisticsError` if there are not at least two data points.
+
+ For sample data, the cut points are linearly interpolated from the
+ two nearest data points. For example, if a cut point falls one-third
+ of the distance between two sample values, ``100`` and ``112``, the
+ cut-point will evaluate to ``104``. Other selection methods may be
+ offered in the future (for example choose ``100`` as the nearest
+ value or compute ``106`` as the midpoint). This might matter if
+ there are too few samples for a given number of cut points.
+
+ If *method* is set to *inclusive*, *dist* is treated as population data.
+ The minimum value is treated as the 0th percentile and the maximum
+ value is treated as the 100th percentile. If *dist* is an instance of
+ a class that defines an :meth:`~inv_cdf` method, setting *method*
+ has no effect.
+
+ .. doctest::
+
+ # Decile cut points for empirically sampled data
+ >>> data = [105, 129, 87, 86, 111, 111, 89, 81, 108, 92, 110,
+ ... 100, 75, 105, 103, 109, 76, 119, 99, 91, 103, 129,
+ ... 106, 101, 84, 111, 74, 87, 86, 103, 103, 106, 86,
+ ... 111, 75, 87, 102, 121, 111, 88, 89, 101, 106, 95,
+ ... 103, 107, 101, 81, 109, 104]
+ >>> [round(q, 1) for q in quantiles(data, n=10)]
+ [81.0, 86.2, 89.0, 99.4, 102.5, 103.6, 106.0, 109.8, 111.0]
+
+ >>> # Quartile cut points for the standard normal distribution
+ >>> Z = NormalDist()
+ >>> [round(q, 4) for q in quantiles(Z, n=4)]
+ [-0.6745, 0.0, 0.6745]
+
+ .. versionadded:: 3.8
+
+
Exceptions
----------
@@ -559,7 +607,7 @@ of applications in statistics.
:exc:`StatisticsError` because it takes at least one point to estimate
a central value and at least two points to estimate dispersion.
- .. method:: NormalDist.samples(n, seed=None)
+ .. method:: NormalDist.samples(n, *, seed=None)
Generates *n* random samples for a given mean and standard deviation.
Returns a :class:`list` of :class:`float` values.
@@ -606,7 +654,7 @@ of applications in statistics.
`_
between two normal distributions, giving a measure of agreement.
Returns a value between 0.0 and 1.0 giving `the overlapping area for
- two probability density functions
+ the two probability density functions
`_.
Instances of :class:`NormalDist` support addition, subtraction,
@@ -649,8 +697,8 @@ of applications in statistics.
For example, given `historical data for SAT exams
`_ showing that scores
are normally distributed with a mean of 1060 and a standard deviation of 192,
-determine the percentage of students with scores between 1100 and 1200, after
-rounding to the nearest whole number:
+determine the percentage of students with test scores between 1100 and
+1200, after rounding to the nearest whole number:
.. doctest::
diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst
index aeecdbb24a57b1..0a6bb149075f47 100644
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -4254,7 +4254,10 @@ pairs within braces, for example: ``{'jack': 4098, 'sjoerd': 4127}`` or ``{4098:
Create a new dictionary with keys from *iterable* and values set to *value*.
:meth:`fromkeys` is a class method that returns a new dictionary. *value*
- defaults to ``None``.
+ defaults to ``None``. All of the values refer to just a single instance,
+ so it generally doesn't make sense for *value* to be a mutable object
+ such as an empty list. To get distinct values, use a :ref:`dict
+ comprehension ` instead.
.. method:: get(key[, default])
diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst
index ca0813c7830ad0..3280c95cacbbc3 100644
--- a/Doc/library/subprocess.rst
+++ b/Doc/library/subprocess.rst
@@ -567,6 +567,13 @@ functions.
Popen destructor now emits a :exc:`ResourceWarning` warning if the child
process is still running.
+ .. versionchanged:: 3.8
+ Popen can use :func:`os.posix_spawn` in some cases for better
+ performance. On Windows Subsystem for Linux and QEMU User Emulation,
+ Popen constructor using :func:`os.posix_spawn` no longer raises an
+ exception on errors like a missing program, but the child process fails
+ with a non-zero :attr:`~Popen.returncode`.
+
Exceptions
^^^^^^^^^^
diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst
index d7dbcb107ddadb..c58a6ad75d08fd 100644
--- a/Doc/library/threading.rst
+++ b/Doc/library/threading.rst
@@ -250,7 +250,7 @@ since it is impossible to detect the termination of alien threads.
You may override this method in a subclass. The standard :meth:`run`
method invokes the callable object passed to the object's constructor as
- the *target* argument, if any, with sequential and keyword arguments taken
+ the *target* argument, if any, with positional and keyword arguments taken
from the *args* and *kwargs* arguments, respectively.
.. method:: join(timeout=None)
diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst
index ed5f547e3ce371..c2523ed5296003 100644
--- a/Doc/library/typing.rst
+++ b/Doc/library/typing.rst
@@ -966,8 +966,8 @@ The module defines the following classes, functions and decorators:
def fetch_response() -> Response: ...
- Note that returning instances of private classes is not recommended.
- It is usually preferable to make such classes public.
+ Note that returning instances of private classes is not recommended.
+ It is usually preferable to make such classes public.
.. data:: Any
diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst
index 412808ad44866d..4f083a3181e7a9 100644
--- a/Doc/library/venv.rst
+++ b/Doc/library/venv.rst
@@ -234,14 +234,19 @@ creation according to their needs, the :class:`EnvBuilder` class.
There is also a module-level convenience function:
.. function:: create(env_dir, system_site_packages=False, clear=False, \
- symlinks=False, with_pip=False)
+ symlinks=False, with_pip=False, prompt=None)
Create an :class:`EnvBuilder` with the given keyword arguments, and call its
:meth:`~EnvBuilder.create` method with the *env_dir* argument.
+ .. versionadded:: 3.3
+
.. versionchanged:: 3.4
Added the ``with_pip`` parameter
+ .. versionchanged:: 3.6
+ Added the ``prompt`` parameter
+
An example of extending ``EnvBuilder``
--------------------------------------
diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst
index 7f3d267d74c2ec..80a908bbd83b0a 100644
--- a/Doc/library/weakref.rst
+++ b/Doc/library/weakref.rst
@@ -139,6 +139,10 @@ Extension types can easily be made to support weak references; see
prevent their use as dictionary keys. *callback* is the same as the parameter
of the same name to the :func:`ref` function.
+ .. versionchanged:: 3.8
+ Extended the operator support on proxy objects to include the matrix
+ multiplication operators ``@`` and ``@=``.
+
.. function:: getweakrefcount(object)
diff --git a/Doc/library/wsgiref.rst b/Doc/library/wsgiref.rst
index b85ec53c8ae536..6edd0714b9df3b 100644
--- a/Doc/library/wsgiref.rst
+++ b/Doc/library/wsgiref.rst
@@ -767,7 +767,7 @@ This is a working "Hello World" WSGI application::
# use a function (note that you're not limited to a function, you can
# use a class for example). The first argument passed to the function
# is a dictionary containing CGI-style environment variables and the
- # second variable is the callable object (see PEP 333).
+ # second variable is the callable object.
def hello_world_app(environ, start_response):
status = '200 OK' # HTTP Status
headers = [('Content-type', 'text/plain; charset=utf-8')] # HTTP Headers
@@ -783,33 +783,7 @@ This is a working "Hello World" WSGI application::
httpd.serve_forever()
-Example of a small wsgiref-based web server::
-
- # Takes a path to serve from and an optional port number (defaults to 8000),
- # then tries to serve files. Mime types are guessed from the file names, 404
- # errors are raised if the file is not found.
- import sys
- import os
- import mimetypes
- from wsgiref import simple_server, util
-
- def app(environ, respond):
- fn = os.path.join(path, environ['PATH_INFO'][1:])
- if '.' not in fn.split(os.path.sep)[-1]:
- fn = os.path.join(fn, 'index.html')
- type = mimetypes.guess_type(fn)[0]
-
- if os.path.exists(fn):
- respond('200 OK', [('Content-Type', type)])
- return util.FileWrapper(open(fn, "rb"))
- else:
- respond('404 Not Found', [('Content-Type', 'text/plain')])
- return [b'not found']
-
- path = sys.argv[1]
- port = int(sys.argv[2]) if len(sys.argv) > 2 else 8000
- with simple_server.make_server('', port, app) as httpd:
- print("Serving {} on port {}, control-C to stop".format(path, port))
-
- # Serve until process is killed
- httpd.serve_forever()
+Example of a WSGI application serving the current directory, accepting an
+optional directory and port number (default: 8000) on the command line:
+
+.. literalinclude:: ../../Tools/scripts/serve.py
diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst
index c83e719e959a29..c4667315793e4c 100644
--- a/Doc/library/xml.etree.elementtree.rst
+++ b/Doc/library/xml.etree.elementtree.rst
@@ -399,6 +399,12 @@ module. We'll be using the ``countrydata`` XML document from the
# All 'neighbor' nodes that are the second child of their parent
root.findall(".//neighbor[2]")
+For XML with namespaces, use the usual qualified ``{namespace}tag`` notation::
+
+ # All dublin-core "title" tags in the document
+ root.findall(".//{http://purl.org/dc/elements/1.1/}title")
+
+
Supported XPath syntax
^^^^^^^^^^^^^^^^^^^^^^
@@ -411,9 +417,16 @@ Supported XPath syntax
| | For example, ``spam`` selects all child elements |
| | named ``spam``, and ``spam/egg`` selects all |
| | grandchildren named ``egg`` in all children named |
-| | ``spam``. |
+| | ``spam``. ``{namespace}*`` selects all tags in the |
+| | given namespace, ``{*}spam`` selects tags named |
+| | ``spam`` in any (or no) namespace, and ``{}*`` |
+| | only selects tags that are not in a namespace. |
+| | |
+| | .. versionchanged:: 3.8 |
+| | Support for star-wildcards was added. |
+-----------------------+------------------------------------------------------+
-| ``*`` | Selects all child elements. For example, ``*/egg`` |
+| ``*`` | Selects all child elements, including comments and |
+| | processing instructions. For example, ``*/egg`` |
| | selects all grandchildren named ``egg``. |
+-----------------------+------------------------------------------------------+
| ``.`` | Selects the current node. This is mostly useful |
@@ -465,6 +478,53 @@ Reference
Functions
^^^^^^^^^
+.. function:: canonicalize(xml_data=None, *, out=None, from_file=None, **options)
+
+ `C14N 2.0 `_ transformation function.
+
+ Canonicalization is a way to normalise XML output in a way that allows
+ byte-by-byte comparisons and digital signatures. It reduces the freedom
+ that XML serializers have and instead generates a more constrained XML
+ representation. The main restrictions regard the placement of namespace
+ declarations, the ordering of attributes, and ignorable whitespace.
+
+ This function takes an XML data string (*xml_data*) or a file path or
+ file-like object (*from_file*) as input, converts it to the canonical
+ form, and writes it out using the *out* file(-like) object, if provided,
+ or returns it as a text string if not. The output file receives text,
+ not bytes. It should therefore be opened in text mode with ``utf-8``
+ encoding.
+
+ Typical uses::
+
+ xml_data = "..."
+ print(canonicalize(xml_data))
+
+ with open("c14n_output.xml", mode='w', encoding='utf-8') as out_file:
+ canonicalize(xml_data, out=out_file)
+
+ with open("c14n_output.xml", mode='w', encoding='utf-8') as out_file:
+ canonicalize(from_file="inputfile.xml", out=out_file)
+
+ The configuration *options* are as follows:
+
+ - *with_comments*: set to true to include comments (default: false)
+ - *strip_text*: set to true to strip whitespace before and after text content
+ (default: false)
+ - *rewrite_prefixes*: set to true to replace namespace prefixes by "n{number}"
+ (default: false)
+ - *qname_aware_tags*: a set of qname aware tag names in which prefixes
+ should be replaced in text content (default: empty)
+ - *qname_aware_attrs*: a set of qname aware attribute names in which prefixes
+ should be replaced in text content (default: empty)
+ - *exclude_attrs*: a set of attribute names that should not be serialised
+ - *exclude_tags*: a set of tag names that should not be serialised
+
+ In the option list above, "a set" refers to any collection or iterable of
+ strings, no ordering is expected.
+
+ .. versionadded:: 3.8
+
.. function:: Comment(text=None)
@@ -523,8 +583,9 @@ Functions
Parses an XML section into an element tree incrementally, and reports what's
going on to the user. *source* is a filename or :term:`file object`
containing XML data. *events* is a sequence of events to report back. The
- supported events are the strings ``"start"``, ``"end"``, ``"start-ns"`` and
- ``"end-ns"`` (the "ns" events are used to get detailed namespace
+ supported events are the strings ``"start"``, ``"end"``, ``"comment"``,
+ ``"pi"``, ``"start-ns"`` and ``"end-ns"``
+ (the "ns" events are used to get detailed namespace
information). If *events* is omitted, only ``"end"`` events are reported.
*parser* is an optional parser instance. If not given, the standard
:class:`XMLParser` parser is used. *parser* must be a subclass of
@@ -549,6 +610,10 @@ Functions
.. deprecated:: 3.4
The *parser* argument.
+ .. versionchanged:: 3.8
+ The ``comment`` and ``pi`` events were added.
+
+
.. function:: parse(source, parser=None)
Parses an XML section into an element tree. *source* is a filename or file
@@ -764,7 +829,7 @@ Element Objects
Finds the first subelement matching *match*. *match* may be a tag name
or a :ref:`path `. Returns an element instance
or ``None``. *namespaces* is an optional mapping from namespace prefix
- to full name. Pass ``None`` as prefix to move all unprefixed tag names
+ to full name. Pass ``''`` as prefix to move all unprefixed tag names
in the expression into the given namespace.
@@ -773,7 +838,7 @@ Element Objects
Finds all matching subelements, by tag name or
:ref:`path `. Returns a list containing all matching
elements in document order. *namespaces* is an optional mapping from
- namespace prefix to full name. Pass ``None`` as prefix to move all
+ namespace prefix to full name. Pass ``''`` as prefix to move all
unprefixed tag names in the expression into the given namespace.
@@ -784,7 +849,7 @@ Element Objects
of the first matching element, or *default* if no element was found.
Note that if the matching element has no text content an empty string
is returned. *namespaces* is an optional mapping from namespace prefix
- to full name. Pass ``None`` as prefix to move all unprefixed tag names
+ to full name. Pass ``''`` as prefix to move all unprefixed tag names
in the expression into the given namespace.
@@ -1021,14 +1086,24 @@ TreeBuilder Objects
^^^^^^^^^^^^^^^^^^^
-.. class:: TreeBuilder(element_factory=None)
+.. class:: TreeBuilder(element_factory=None, *, comment_factory=None, \
+ pi_factory=None, insert_comments=False, insert_pis=False)
Generic element structure builder. This builder converts a sequence of
- start, data, and end method calls to a well-formed element structure. You
- can use this class to build an element structure using a custom XML parser,
- or a parser for some other XML-like format. *element_factory*, when given,
- must be a callable accepting two positional arguments: a tag and
- a dict of attributes. It is expected to return a new element instance.
+ start, data, end, comment and pi method calls to a well-formed element
+ structure. You can use this class to build an element structure using
+ a custom XML parser, or a parser for some other XML-like format.
+
+ *element_factory*, when given, must be a callable accepting two positional
+ arguments: a tag and a dict of attributes. It is expected to return a new
+ element instance.
+
+ The *comment_factory* and *pi_factory* functions, when given, should behave
+ like the :func:`Comment` and :func:`ProcessingInstruction` functions to
+ create comments and processing instructions. When not given, the default
+ factories will be used. When *insert_comments* and/or *insert_pis* is true,
+ comments/pis will be inserted into the tree if they appear within the root
+ element (but not outside of it).
.. method:: close()
@@ -1054,8 +1129,24 @@ TreeBuilder Objects
containing element attributes. Returns the opened element.
+ .. method:: comment(text)
+
+ Creates a comment with the given *text*. If ``insert_comments`` is true,
+ this will also add it to the tree.
+
+ .. versionadded:: 3.8
+
+
+ .. method:: pi(target, text)
+
+ Creates a processing instruction with the given *target* name and *text*. If
+ ``insert_pis`` is true, this will also add it to the tree.
+
+ .. versionadded:: 3.8
+
+
In addition, a custom :class:`TreeBuilder` object can provide the
- following method:
+ following methods:
.. method:: doctype(name, pubid, system)
@@ -1065,6 +1156,36 @@ TreeBuilder Objects
.. versionadded:: 3.2
+ .. method:: start_ns(prefix, uri)
+
+ Is called whenever the parser encounters a new namespace declaration,
+ before the ``start()`` callback for the opening element that defines it.
+ *prefix* is ``''`` for the default namespace and the declared
+ namespace prefix name otherwise. *uri* is the namespace URI.
+
+ .. versionadded:: 3.8
+
+ .. method:: end_ns(prefix)
+
+ Is called after the ``end()`` callback of an element that declared
+ a namespace prefix mapping, with the name of the *prefix* that went
+ out of scope.
+
+ .. versionadded:: 3.8
+
+
+.. class:: C14NWriterTarget(write, *, \
+ with_comments=False, strip_text=False, rewrite_prefixes=False, \
+ qname_aware_tags=None, qname_aware_attrs=None, \
+ exclude_attrs=None, exclude_tags=None)
+
+ A `C14N 2.0 `_ writer. Arguments are the
+ same as for the :func:`canonicalize` function. This class does not build a
+ tree but translates the callback events directly into a serialised form
+ using the *write* function.
+
+ .. versionadded:: 3.8
+
.. _elementtree-xmlparser-objects:
@@ -1100,7 +1221,8 @@ XMLParser Objects
:meth:`XMLParser.feed` calls *target*\'s ``start(tag, attrs_dict)`` method
for each opening tag, its ``end(tag)`` method for each closing tag, and data
- is processed by method ``data(data)``. :meth:`XMLParser.close` calls
+ is processed by method ``data(data)``. For further supported callback
+ methods, see the :class:`TreeBuilder` class. :meth:`XMLParser.close` calls
*target*\'s method ``close()``. :class:`XMLParser` can be used not only for
building a tree structure. This is an example of counting the maximum depth
of an XML file::
@@ -1150,9 +1272,9 @@ XMLPullParser Objects
callback target, :class:`XMLPullParser` collects an internal list of parsing
events and lets the user read from it. *events* is a sequence of events to
report back. The supported events are the strings ``"start"``, ``"end"``,
- ``"start-ns"`` and ``"end-ns"`` (the "ns" events are used to get detailed
- namespace information). If *events* is omitted, only ``"end"`` events are
- reported.
+ ``"comment"``, ``"pi"``, ``"start-ns"`` and ``"end-ns"`` (the "ns" events
+ are used to get detailed namespace information). If *events* is omitted,
+ only ``"end"`` events are reported.
.. method:: feed(data)
@@ -1171,7 +1293,13 @@ XMLPullParser Objects
data fed to the
parser. The iterator yields ``(event, elem)`` pairs, where *event* is a
string representing the type of event (e.g. ``"end"``) and *elem* is the
- encountered :class:`Element` object.
+ encountered :class:`Element` object, or other context value as follows.
+
+ * ``start``, ``end``: the current Element.
+ * ``comment``, ``pi``: the current comment / processing instruction
+ * ``start-ns``: a tuple ``(prefix, uri)`` naming the declared namespace
+ mapping.
+ * ``end-ns``: :const:`None` (this may change in a future version)
Events provided in a previous call to :meth:`read_events` will not be
yielded again. Events are consumed from the internal queue only when
@@ -1191,6 +1319,10 @@ XMLPullParser Objects
.. versionadded:: 3.4
+ .. versionchanged:: 3.8
+ The ``comment`` and ``pi`` events were added.
+
+
Exceptions
^^^^^^^^^^
diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst
index 4e9edff270143d..9db9697105d6b5 100644
--- a/Doc/library/zipfile.rst
+++ b/Doc/library/zipfile.rst
@@ -52,6 +52,15 @@ The module defines the following items:
:ref:`zipfile-objects` for constructor details.
+.. class:: Path
+ :noindex:
+
+ A pathlib-compatible wrapper for zip files. See section
+ :ref:`path-objects` for details.
+
+ .. versionadded:: 3.8
+
+
.. class:: PyZipFile
:noindex:
@@ -456,6 +465,64 @@ The following data attributes are also available:
truncated.
+.. _path-objects:
+
+Path Objects
+------------
+
+.. class:: Path(root, at='')
+
+ Construct a Path object from a ``root`` zipfile (which may be a
+ :class:`ZipFile` instance or ``file`` suitable for passing to
+ the :class:`ZipFile` constructor).
+
+ ``at`` specifies the location of this Path within the zipfile,
+ e.g. 'dir/file.txt', 'dir/', or ''. Defaults to the empty string,
+ indicating the root.
+
+Path objects expose the following features of :mod:`pathlib.Path`
+objects:
+
+Path objects are traversable using the ``/`` operator.
+
+.. attribute:: Path.name
+
+ The final path component.
+
+.. method:: Path.open(*, **)
+
+ Invoke :meth:`ZipFile.open` on the current path. Accepts
+ the same arguments as :meth:`ZipFile.open`.
+
+.. method:: Path.listdir()
+
+ Enumerate the children of the current directory.
+
+.. method:: Path.is_dir()
+
+ Return ``True`` if the current context references a directory.
+
+.. method:: Path.is_file()
+
+ Return ``True`` if the current context references a file.
+
+.. method:: Path.exists()
+
+ Return ``True`` if the current context references a file or
+ directory in the zip file.
+
+.. method:: Path.read_text(*, **)
+
+ Read the current file as unicode text. Positional and
+ keyword arguments are passed through to
+ :class:`io.TextIOWrapper` (except ``buffer``, which is
+ implied by the context).
+
+.. method:: Path.read_bytes()
+
+ Read the current file as bytes.
+
+
.. _pyzipfile-objects:
PyZipFile Objects
diff --git a/Doc/license.rst b/Doc/license.rst
index a315b6f8134d48..bf2e4c522ce172 100644
--- a/Doc/license.rst
+++ b/Doc/license.rst
@@ -561,7 +561,7 @@ SipHash24
---------
The file :file:`Python/pyhash.c` contains Marek Majkowski' implementation of
-Dan Bernstein's SipHash24 algorithm. The contains the following note::
+Dan Bernstein's SipHash24 algorithm. It contains the following note::
Copyright (c) 2013 Marek Majkowski
@@ -913,3 +913,40 @@ library unless the build is configured ``--with-system-libmpdec``::
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
+
+
+W3C C14N test suite
+-------------------
+
+The C14N 2.0 test suite in the :mod:`test` package
+(``Lib/test/xmltestdata/c14n-20/``) was retrieved from the W3C website at
+https://www.w3.org/TR/xml-c14n2-testcases/ and is distributed under the
+3-clause BSD license:
+
+ Copyright (c) 2013 W3C(R) (MIT, ERCIM, Keio, Beihang),
+ All Rights Reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of works must retain the original copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the original copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the W3C nor the names of its contributors may be
+ used to endorse or promote products derived from this work without
+ specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst
index 1683d25db9247a..9fc9f3a3848a4e 100644
--- a/Doc/reference/datamodel.rst
+++ b/Doc/reference/datamodel.rst
@@ -1311,9 +1311,9 @@ Basic customization
Called by the :func:`format` built-in function,
and by extension, evaluation of :ref:`formatted string literals
` and the :meth:`str.format` method, to produce a "formatted"
- string representation of an object. The ``format_spec`` argument is
+ string representation of an object. The *format_spec* argument is
a string that contains a description of the formatting options desired.
- The interpretation of the ``format_spec`` argument is up to the type
+ The interpretation of the *format_spec* argument is up to the type
implementing :meth:`__format__`, however most classes will either
delegate formatting to one of the built-in types, or use a similar
formatting option syntax.
@@ -2680,13 +2680,13 @@ Asynchronous context managers can be used in an :keyword:`async with` statement.
.. method:: object.__aenter__(self)
- This method is semantically similar to the :meth:`__enter__`, with only
- difference that it must return an *awaitable*.
+ Semantically similar to :meth:`__enter__`, the only
+ difference being that it must return an *awaitable*.
.. method:: object.__aexit__(self, exc_type, exc_value, traceback)
- This method is semantically similar to the :meth:`__exit__`, with only
- difference that it must return an *awaitable*.
+ Semantically similar to :meth:`__exit__`, the only
+ difference being that it must return an *awaitable*.
An example of an asynchronous context manager class::
diff --git a/Doc/reference/import.rst b/Doc/reference/import.rst
index 9a0ab39d3b4a3e..0228bfb7e984c5 100644
--- a/Doc/reference/import.rst
+++ b/Doc/reference/import.rst
@@ -345,12 +345,11 @@ of what happens during the loading portion of import::
_init_module_attrs(spec, module)
if spec.loader is None:
- if spec.submodule_search_locations is not None:
- # namespace package
- sys.modules[spec.name] = module
- else:
- # unsupported
- raise ImportError
+ # unsupported
+ raise ImportError
+ if spec.origin is None and spec.submodule_search_locations is not None:
+ # namespace package
+ sys.modules[spec.name] = module
elif not hasattr(spec.loader, 'exec_module'):
module = spec.loader.load_module(spec.name)
# Set __loader__ and __package__ if missing.
@@ -921,6 +920,46 @@ it is sufficient to raise :exc:`ModuleNotFoundError` directly from
``None``. The latter indicates that the meta path search should continue,
while raising an exception terminates it immediately.
+.. _relativeimports:
+
+Package Relative Imports
+========================
+
+Relative imports use leading dots. A single leading dot indicates a relative
+import, starting with the current package. Two or more leading dots indicate a
+relative import to the parent(s) of the current package, one level per dot
+after the first. For example, given the following package layout::
+
+ package/
+ __init__.py
+ subpackage1/
+ __init__.py
+ moduleX.py
+ moduleY.py
+ subpackage2/
+ __init__.py
+ moduleZ.py
+ moduleA.py
+
+In either ``subpackage1/moduleX.py`` or ``subpackage1/__init__.py``,
+the following are valid relative imports::
+
+ from .moduleY import spam
+ from .moduleY import spam as ham
+ from . import moduleY
+ from ..subpackage1 import moduleY
+ from ..subpackage2.moduleZ import eggs
+ from ..moduleA import foo
+
+Absolute imports may use either the ``import <>`` or ``from <> import <>``
+syntax, but relative imports may only use the second form; the reason
+for this is that::
+
+ import XXX.YYY.ZZZ
+
+should expose ``XXX.YYY.ZZZ`` as a usable expression, but ``.moduleY`` is
+not a valid expression.
+
Special considerations for __main__
===================================
diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst
index fb04ccc839aaa3..13adc1a2e433fc 100644
--- a/Doc/reference/lexical_analysis.rst
+++ b/Doc/reference/lexical_analysis.rst
@@ -680,11 +680,12 @@ with a closing curly bracket ``'}'``.
Expressions in formatted string literals are treated like regular
Python expressions surrounded by parentheses, with a few exceptions.
-An empty expression is not allowed, and a :keyword:`lambda` expression
-must be surrounded by explicit parentheses. Replacement expressions
-can contain line breaks (e.g. in triple-quoted strings), but they
-cannot contain comments. Each expression is evaluated in the context
-where the formatted string literal appears, in order from left to right.
+An empty expression is not allowed, and both :keyword:`lambda` and
+assignment expressions ``:=`` must be surrounded by explicit parentheses.
+Replacement expressions can contain line breaks (e.g. in triple-quoted
+strings), but they cannot contain comments. Each expression is evaluated
+in the context where the formatted string literal appears, in order from
+left to right.
If a conversion is specified, the result of evaluating the expression
is converted before formatting. Conversion ``'!s'`` calls :func:`str` on
diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst
index 00964afc6d3d2f..af7c0caff62797 100644
--- a/Doc/reference/simple_stmts.rst
+++ b/Doc/reference/simple_stmts.rst
@@ -169,12 +169,12 @@ Assignment of an object to a single target is recursively defined as follows.
.. _attr-target-note:
Note: If the object is a class instance and the attribute reference occurs on
- both sides of the assignment operator, the RHS expression, ``a.x`` can access
+ both sides of the assignment operator, the right-hand side expression, ``a.x`` can access
either an instance attribute or (if no instance attribute exists) a class
- attribute. The LHS target ``a.x`` is always set as an instance attribute,
+ attribute. The left-hand side target ``a.x`` is always set as an instance attribute,
creating it if necessary. Thus, the two occurrences of ``a.x`` do not
- necessarily refer to the same attribute: if the RHS expression refers to a
- class attribute, the LHS creates a new instance attribute as the target of the
+ necessarily refer to the same attribute: if the right-hand side expression refers to a
+ class attribute, the left-hand side creates a new instance attribute as the target of the
assignment::
class Cls:
@@ -828,7 +828,8 @@ exists. Two dots means up one package level. Three dots is up two levels, etc.
So if you execute ``from . import mod`` from a module in the ``pkg`` package
then you will end up importing ``pkg.mod``. If you execute ``from ..subpkg2
import mod`` from within ``pkg.subpkg1`` you will import ``pkg.subpkg2.mod``.
-The specification for relative imports is contained within :pep:`328`.
+The specification for relative imports is contained in
+the :ref:`relativeimports` section.
:func:`importlib.import_module` is provided to support applications that
determine dynamically the modules to be loaded.
diff --git a/Doc/tools/templates/download.html b/Doc/tools/templates/download.html
index 1a99b18bbb268e..d9364d6ced729f 100644
--- a/Doc/tools/templates/download.html
+++ b/Doc/tools/templates/download.html
@@ -12,8 +12,7 @@
Download Python {{ release }} Documentation
{% if last_updated %}
Last updated on: {{ last_updated }}.
{% endif %}
To download an archive containing all the documents for this version of
-Python in one of various formats, follow one of links in this table. The numbers
-in the table are the size of the download files in megabytes.
+Python in one of various formats, follow one of the links in this table.
Format
Packed as .zip
Packed as .tar.bz2
diff --git a/Doc/tutorial/inputoutput.rst b/Doc/tutorial/inputoutput.rst
index 79427860f518ff..fc2bd5578c4cf1 100644
--- a/Doc/tutorial/inputoutput.rst
+++ b/Doc/tutorial/inputoutput.rst
@@ -322,6 +322,8 @@ equivalent :keyword:`try`\ -\ :keyword:`finally` blocks::
>>> with open('workfile') as f:
... read_data = f.read()
+
+ >>> # We can check that the file has been automatically closed.
>>> f.closed
True
diff --git a/Doc/tutorial/introduction.rst b/Doc/tutorial/introduction.rst
index 3e0c99558ed7f3..a4dbd6351b77d8 100644
--- a/Doc/tutorial/introduction.rst
+++ b/Doc/tutorial/introduction.rst
@@ -383,7 +383,7 @@ items of different types, but usually the items all have the same type. ::
>>> squares
[1, 4, 9, 16, 25]
-Like strings (and all other built-in :term:`sequence` type), lists can be
+Like strings (and all other built-in :term:`sequence` types), lists can be
indexed and sliced::
>>> squares[0] # indexing returns the item
diff --git a/Doc/tutorial/modules.rst b/Doc/tutorial/modules.rst
index fd594fd97af4d4..d0a68faa2ee250 100644
--- a/Doc/tutorial/modules.rst
+++ b/Doc/tutorial/modules.rst
@@ -523,7 +523,7 @@ Although certain modules are designed to export only names that follow certain
patterns when you use ``import *``, it is still considered bad practice in
production code.
-Remember, there is nothing wrong with using ``from Package import
+Remember, there is nothing wrong with using ``from package import
specific_submodule``! In fact, this is the recommended notation unless the
importing module needs to use submodules with the same name from different
packages.
diff --git a/Doc/tutorial/whatnow.rst b/Doc/tutorial/whatnow.rst
index d876d0740d8065..3208201312b871 100644
--- a/Doc/tutorial/whatnow.rst
+++ b/Doc/tutorial/whatnow.rst
@@ -39,7 +39,7 @@ More Python resources:
* https://docs.python.org: Fast access to Python's documentation.
* https://pypi.org: The Python Package Index, previously also nicknamed
- the Cheese Shop, is an index of user-created Python modules that are available
+ the Cheese Shop [#]_, is an index of user-created Python modules that are available
for download. Once you begin releasing code, you can register it here so that
others can find it.
@@ -68,3 +68,9 @@ Before posting, be sure to check the list of
:ref:`Frequently Asked Questions ` (also called the FAQ). The
FAQ answers many of the questions that come up again and again, and may
already contain the solution for your problem.
+
+.. rubric:: Footnotes
+
+.. [#] "Cheese Shop" is a Monty Python sketch: a customer enters a cheese shop,
+ but whatever cheese he asks for, the clerk says it's missing.
+
diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst
index 0574336cf35403..fd47ce2ab53849 100644
--- a/Doc/using/cmdline.rst
+++ b/Doc/using/cmdline.rst
@@ -922,15 +922,18 @@ conflict.
Debug-mode variables
~~~~~~~~~~~~~~~~~~~~
-Setting these variables only has an effect in a debug build of Python, that is,
-if Python was configured with the ``--with-pydebug`` build option.
+Setting these variables only has an effect in a debug build of Python.
.. envvar:: PYTHONTHREADDEBUG
If set, Python will print threading debug info.
+ Needs Python configured with the ``--with-pydebug`` build option.
+
.. envvar:: PYTHONDUMPREFS
If set, Python will dump objects and reference counts still alive after
shutting down the interpreter.
+
+ Needs Python configured with the ``--with-trace-refs`` build option.
diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst
index 39a0da5e61e9ff..631b5ef218753e 100644
--- a/Doc/whatsnew/3.8.rst
+++ b/Doc/whatsnew/3.8.rst
@@ -67,6 +67,47 @@ Summary -- Release highlights
New Features
============
+Assignment expressions
+----------------------
+
+There is new syntax (the "walrus operator", ``:=``) to assign values
+to variables as part of an expression. Example::
+
+ if (n := len(a)) > 10:
+ print(f"List is too long ({n} elements, expected <= 10)")
+
+See :pep:`572` for a full description.
+
+(Contributed by Emily Morehouse in :issue:`35224`.)
+
+.. TODO: Emily will sprint on docs at PyCon US 2019.
+
+
+Positional-only parameters
+--------------------------
+
+There is new syntax (``/``) to indicate that some function parameters
+must be specified positionally (i.e., cannot be used as keyword
+arguments). This is the same notation as shown by ``help()`` for
+functions implemented in C (produced by Larry Hastings' "Argument
+Clinic" tool). Example::
+
+ def pow(x, y, z=None, /):
+ r = x**y
+ if z is not None:
+ r %= z
+ return r
+
+Now ``pow(2, 10)`` and ``pow(2, 10, 17)`` are valid calls, but
+``pow(x=2, y=10)`` and ``pow(2, 10, z=17)`` are invalid.
+
+See :pep:`570` for a full description.
+
+(Contributed by Pablo Galindo in :issue:`36540`.)
+
+.. TODO: Pablo will sprint on docs at PyCon US 2019.
+
+
Parallel filesystem cache for compiled bytecode files
-----------------------------------------------------
@@ -82,6 +123,31 @@ subdirectories).
(Contributed by Carl Meyer in :issue:`33499`.)
+Debug build uses the same ABI as release build
+-----------------------------------------------
+
+Python now uses the same ABI whether it is built in release or debug mode. On
+Unix, when Python is built in debug mode, it is now possible to load C
+extensions built in release mode and C extensions built using the stable ABI.
+
+Release builds and debug builds are now ABI compatible: defining the
+``Py_DEBUG`` macro no longer implies the ``Py_TRACE_REFS`` macro, which
+introduces the only ABI incompatibility. The ``Py_TRACE_REFS`` macro, which
+adds the :func:`sys.getobjects` function and the :envvar:`PYTHONDUMPREFS`
+environment variable, can be set using the new ``./configure --with-trace-refs``
+build option.
+(Contributed by Victor Stinner in :issue:`36465`.)
+
+On Unix, C extensions are no longer linked to libpython. It is now possible
+for a statically linked Python to load a C extension built using a shared
+library Python.
+(Contributed by Victor Stinner in :issue:`21536`.)
+
+On Unix, when Python is built in debug mode, import now also looks for C
+extensions compiled in release mode and for C extensions compiled with the
+stable ABI.
+(Contributed by Victor Stinner in :issue:`36722`.)
+
Other Language Changes
======================
@@ -150,17 +216,6 @@ New Modules
Improved Modules
================
-* The :meth:`_asdict()` method for :func:`collections.namedtuple` now returns
- a :class:`dict` instead of a :class:`collections.OrderedDict`. This works because
- regular dicts have guaranteed ordering in since Python 3.7. If the extra
- features of :class:`OrderedDict` are required, the suggested remediation is
- to cast the result to the desired type: ``OrderedDict(nt._asdict())``.
- (Contributed by Raymond Hettinger in :issue:`35864`.)
-
-* The :mod:`unicodedata` module has been upgraded to use the `Unicode 12.0.0
- `_
- release.
-
asyncio
-------
@@ -168,6 +223,17 @@ asyncio
On Windows, the default event loop is now :class:`~asyncio.ProactorEventLoop`.
+collections
+-----------
+
+The :meth:`_asdict()` method for :func:`collections.namedtuple` now returns
+a :class:`dict` instead of a :class:`collections.OrderedDict`. This works because
+regular dicts have guaranteed ordering since Python 3.7. If the extra
+features of :class:`OrderedDict` are required, the suggested remediation is
+to cast the result to the desired type: ``OrderedDict(nt._asdict())``.
+(Contributed by Raymond Hettinger in :issue:`35864`.)
+
+
ctypes
------
@@ -178,6 +244,16 @@ where the DLL is stored (if a full or partial path is used to load the initial
DLL) and paths added by :func:`~os.add_dll_directory`.
+datetime
+--------
+
+Added new alternate constructors :meth:`datetime.date.fromisocalendar` and
+:meth:`datetime.datetime.fromisocalendar`, which construct :class:`date` and
+:class:`datetime` objects respectively from ISO year, week number and weekday;
+these are the inverse of each class's ``isocalendar`` method.
+(Contributed by Paul Ganssle in :issue:`36004`.)
+
+
gettext
-------
@@ -293,6 +369,10 @@ pathlib
contain characters unrepresentable at the OS level.
(Contributed by Serhiy Storchaka in :issue:`33721`.)
+Added :meth:`pathlib.Path.link_to()` which creates a hard link pointing
+to a path.
+(Contributed by Joannah Nanjekye in :issue:`26978`)
+
socket
------
@@ -337,6 +417,10 @@ Added :func:`statistics.geometric_mean()`
Added :func:`statistics.multimode` that returns a list of the most
common values. (Contributed by Raymond Hettinger in :issue:`35892`.)
+Added :func:`statistics.quantiles` that divides data or a distribution
+into equiprobable intervals (e.g. quartiles, deciles, or percentiles).
+(Contributed by Raymond Hettinger in :issue:`36546`.)
+
Added :class:`statistics.NormalDist`, a tool for creating
and manipulating normal distributions of a random variable.
(Contributed by Raymond Hettinger in :issue:`36018`.)
@@ -344,8 +428,10 @@ and manipulating normal distributions of a random variable.
::
>>> temperature_feb = NormalDist.from_samples([4, 12, -3, 2, 7, 14])
- >>> temperature_feb
- NormalDist(mu=6.0, sigma=6.356099432828281)
+ >>> temperature_feb.mean
+ 6.0
+ >>> temperature_feb.stdev
+ 6.356099432828281
>>> temperature_feb.cdf(3) # Chance of being under 3 degrees
0.3184678262814532
@@ -353,8 +439,8 @@ and manipulating normal distributions of a random variable.
>>> temperature_feb.pdf(7) / temperature_feb.pdf(10)
1.2039930378537762
- >>> el_nino = NormalDist(4, 2.5)
- >>> temperature_feb += el_nino # Add in a climate effect
+ >>> el_niño = NormalDist(4, 2.5)
+ >>> temperature_feb += el_niño # Add in a climate effect
>>> temperature_feb
NormalDist(mu=10.0, sigma=6.830080526611674)
@@ -374,6 +460,16 @@ in a standardized and extensible format, and offers several other benefits.
(Contributed by C.A.M. Gerlach in :issue:`36268`.)
+test
+----
+
+The :mod:`test` module now uses ``CONSTANTS`` for the major platforms
+(``AIX``, ``ANDROID``, ``LINUX``, ``JYTHON``, ``MACOS``, ``MS_WINDOWS``)
+to standardize the style compared to the current practice of some constants
+and different styles of sys.platform and platform.system().
+The constants are defined in :mod:`test.support`.
+(Contributed by M. Felt in :issue:`36624`.)
+
tokenize
--------
@@ -410,10 +506,15 @@ Added new clock :data:`~time.CLOCK_UPTIME_RAW` for macOS 10.12.
unicodedata
-----------
+* The :mod:`unicodedata` module has been upgraded to use the `Unicode 12.0.0
+ `_
+ release.
+
* New function :func:`~unicodedata.is_normalized` can be used to verify a string
is in a specific normal form. (Contributed by Max Belanger and David Euresti in
:issue:`32285`).
+
unittest
--------
@@ -430,6 +531,13 @@ venv
activating virtual environments under PowerShell Core 6.1.
(Contributed by Brett Cannon in :issue:`32718`.)
+weakref
+-------
+
+* The proxy objects returned by :func:`weakref.proxy` now support the matrix
+ multiplication operators ``@`` and ``@=`` in addition to the other
+ numeric operators. (Contributed by Mark Dickinson in :issue:`36669`.)
+
xml
---
@@ -438,6 +546,15 @@ xml
external entities by default.
(Contributed by Christian Heimes in :issue:`17239`.)
+* The ``.find*()`` methods in the :mod:`xml.etree.ElementTree` module
+ support wildcard searches like ``{*}tag`` which ignores the namespace
+ and ``{namespace}*`` which returns all tags in the given namespace.
+ (Contributed by Stefan Behnel in :issue:`28238`.)
+
+* The :mod:`xml.etree.ElementTree` module provides a new function
+ :func:`~xml.etree.ElementTree.canonicalize()` that implements C14N 2.0.
+ (Contributed by Stefan Behnel in :issue:`13611`.)
+
Optimizations
=============
@@ -639,6 +756,10 @@ Deprecated
`.
(Contributed by Serhiy Storchaka in :issue:`36492`.)
+* The function :func:`~inspect.getfullargspec` in the :mod:`inspect`
+ module is deprecated in favor of the :func:`inspect.signature`
+ API. (Contributed by Pablo Galindo in :issue:`36751`.)
+
API and Feature Removals
========================
@@ -700,15 +821,38 @@ Changes in Python behavior
raised when getting the attribute from the type dictionary are no longer
ignored. (Contributed by Serhiy Storchaka in :issue:`35459`.)
+* Removed ``__str__`` implementations from builtin types :class:`bool`,
+ :class:`int`, :class:`float`, :class:`complex` and few classes from
+ the standard library. They now inherit ``__str__()`` from :class:`object`.
+ As a result, defining the ``__repr__()`` method in a subclass of these
+ classes will affect their string representation.
+ (Contributed by Serhiy Storchaka in :issue:`36793`.)
+
* On AIX, :attr:`sys.platform` doesn't contain the major version anymore.
It is always ``'aix'``, instead of ``'aix3'`` .. ``'aix7'``. Since
older Python versions include the version number, it is recommended to
always use the ``sys.platform.startswith('aix')``.
(Contributed by M. Felt in :issue:`36588`.)
+* :c:func:`PyEval_AcquireLock` and :c:func:`PyEval_AcquireThread` now
+ terminate the current thread if called while the interpreter is
+ finalizing, making them consistent with :c:func:`PyEval_RestoreThread`,
+ :c:func:`Py_END_ALLOW_THREADS`, and :c:func:`PyGILState_Ensure`. If this
+ behaviour is not desired, guard the call by checking :c:func:`_Py_IsFinalizing`
+ or :func:`sys.is_finalizing`.
+
Changes in the Python API
-------------------------
+* :class:`subprocess.Popen` can now use :func:`os.posix_spawn` in some cases
+ for better performance. On Windows Subsystem for Linux and QEMU User
+ Emulation, the Popen constructor using :func:`os.posix_spawn` no longer raises an
+ exception on errors like missing program, but the child process fails with a
+ non-zero :attr:`~Popen.returncode`.
+
+* The :meth:`imaplib.IMAP4.logout` method no longer silently ignores arbitrary
+ exceptions.
+
* The function :func:`platform.popen` has been removed, it was deprecated since
Python 3.3: use :func:`os.popen` instead.
@@ -822,16 +966,19 @@ Changes in the Python API
Changes in the C API
--------------------
+* On Unix, C extensions are no longer linked to libpython except on
+ Android. When Python is embedded, ``libpython`` must not be loaded with
+ ``RTLD_LOCAL``, but ``RTLD_GLOBAL`` instead. Previously, using
+ ``RTLD_LOCAL``, it was already not possible to load C extensions which were
+ not linked to ``libpython``, like C extensions of the standard library built
+ by the ``*shared*`` section of ``Modules/Setup``.
+
* Use of ``#`` variants of formats in parsing or building value (e.g.
:c:func:`PyArg_ParseTuple`, :c:func:`Py_BuildValue`, :c:func:`PyObject_CallFunction`,
etc.) without ``PY_SSIZE_T_CLEAN`` defined raises ``DeprecationWarning`` now.
It will be removed in 3.10 or 4.0. Read :ref:`arg-parsing` for detail.
(Contributed by Inada Naoki in :issue:`36381`.)
-
-Changes in the C API
---------------------------
-
* Instances of heap-allocated types (such as those created with
:c:func:`PyType_FromSpec`) hold a reference to their type object.
Increasing the reference count of these type objects has been moved from
diff --git a/Grammar/Grammar b/Grammar/Grammar
index eaebdc4340f41c..0cacfb648e9abb 100644
--- a/Grammar/Grammar
+++ b/Grammar/Grammar
@@ -22,13 +22,55 @@ async_funcdef: ASYNC funcdef
funcdef: 'def' NAME parameters ['->' test] ':' [TYPE_COMMENT] func_body_suite
parameters: '(' [typedargslist] ')'
-typedargslist: (tfpdef ['=' test] (',' [TYPE_COMMENT] tfpdef ['=' test])* (TYPE_COMMENT | [',' [TYPE_COMMENT] [
+
+# The following definition for typedarglist is equivalent to this set of rules:
+#
+# arguments = argument (',' [TYPE_COMMENT] argument)*
+# argument = tfpdef ['=' test]
+# kwargs = '**' tfpdef [','] [TYPE_COMMENT]
+# args = '*' [tfpdef]
+# kwonly_kwargs = (',' [TYPE_COMMENT] argument)* (TYPE_COMMENT | [',' [TYPE_COMMENT] [kwargs]])
+# args_kwonly_kwargs = args kwonly_kwargs | kwargs
+# poskeyword_args_kwonly_kwargs = arguments ( TYPE_COMMENT | [',' [TYPE_COMMENT] [args_kwonly_kwargs]])
+# typedargslist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs
+# typedarglist = (arguments ',' [TYPE_COMMENT] '/' [',' [[TYPE_COMMENT] typedargslist_no_posonly]])|(typedargslist_no_posonly)"
+#
+# It needs to be fully expanded to allow our LL(1) parser to work on it.
+
+typedargslist: (
+ (tfpdef ['=' test] (',' [TYPE_COMMENT] tfpdef ['=' test])* ',' [TYPE_COMMENT] '/' [',' [ [TYPE_COMMENT] tfpdef ['=' test] (
+ ',' [TYPE_COMMENT] tfpdef ['=' test])* (TYPE_COMMENT | [',' [TYPE_COMMENT] [
'*' [tfpdef] (',' [TYPE_COMMENT] tfpdef ['=' test])* (TYPE_COMMENT | [',' [TYPE_COMMENT] ['**' tfpdef [','] [TYPE_COMMENT]]])
| '**' tfpdef [','] [TYPE_COMMENT]]])
| '*' [tfpdef] (',' [TYPE_COMMENT] tfpdef ['=' test])* (TYPE_COMMENT | [',' [TYPE_COMMENT] ['**' tfpdef [','] [TYPE_COMMENT]]])
+ | '**' tfpdef [','] [TYPE_COMMENT]]] )
+| (tfpdef ['=' test] (',' [TYPE_COMMENT] tfpdef ['=' test])* (TYPE_COMMENT | [',' [TYPE_COMMENT] [
+ '*' [tfpdef] (',' [TYPE_COMMENT] tfpdef ['=' test])* (TYPE_COMMENT | [',' [TYPE_COMMENT] ['**' tfpdef [','] [TYPE_COMMENT]]])
+ | '**' tfpdef [','] [TYPE_COMMENT]]])
+ | '*' [tfpdef] (',' [TYPE_COMMENT] tfpdef ['=' test])* (TYPE_COMMENT | [',' [TYPE_COMMENT] ['**' tfpdef [','] [TYPE_COMMENT]]])
| '**' tfpdef [','] [TYPE_COMMENT])
+)
tfpdef: NAME [':' test]
-varargslist: (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [
+
+# The following definition for varargslist is equivalent to this set of rules:
+#
+# arguments = argument (',' argument )*
+# argument = vfpdef ['=' test]
+# kwargs = '**' vfpdef [',']
+# args = '*' [vfpdef]
+# kwonly_kwargs = (',' argument )* [',' [kwargs]]
+# args_kwonly_kwargs = args kwonly_kwargs | kwargs
+# poskeyword_args_kwonly_kwargs = arguments [',' [args_kwonly_kwargs]]
+# vararglist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs
+# varargslist = arguments ',' '/' [','[(vararglist_no_posonly)]] | (vararglist_no_posonly)
+#
+# It needs to be fully expanded to allow our LL(1) parser to work on it.
+
+varargslist: vfpdef ['=' test ](',' vfpdef ['=' test])* ',' '/' [',' [ (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [
+ '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]]
+ | '**' vfpdef [',']]]
+ | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]]
+ | '**' vfpdef [',']) ]] | (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [
'*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]]
| '**' vfpdef [',']]]
| '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]]
diff --git a/Include/Python-ast.h b/Include/Python-ast.h
index a5742ff0485b89..0c739db6d14124 100644
--- a/Include/Python-ast.h
+++ b/Include/Python-ast.h
@@ -427,6 +427,7 @@ struct _excepthandler {
struct _arguments {
asdl_seq *args;
+ asdl_seq *posonlyargs;
arg_ty vararg;
asdl_seq *kwonlyargs;
asdl_seq *kw_defaults;
@@ -684,10 +685,11 @@ excepthandler_ty _Py_ExceptHandler(expr_ty type, identifier name, asdl_seq *
body, int lineno, int col_offset, int
end_lineno, int end_col_offset, PyArena
*arena);
-#define arguments(a0, a1, a2, a3, a4, a5, a6) _Py_arguments(a0, a1, a2, a3, a4, a5, a6)
-arguments_ty _Py_arguments(asdl_seq * args, arg_ty vararg, asdl_seq *
- kwonlyargs, asdl_seq * kw_defaults, arg_ty kwarg,
- asdl_seq * defaults, PyArena *arena);
+#define arguments(a0, a1, a2, a3, a4, a5, a6, a7) _Py_arguments(a0, a1, a2, a3, a4, a5, a6, a7)
+arguments_ty _Py_arguments(asdl_seq * args, asdl_seq * posonlyargs, arg_ty
+ vararg, asdl_seq * kwonlyargs, asdl_seq *
+ kw_defaults, arg_ty kwarg, asdl_seq * defaults,
+ PyArena *arena);
#define arg(a0, a1, a2, a3, a4, a5, a6, a7) _Py_arg(a0, a1, a2, a3, a4, a5, a6, a7)
arg_ty _Py_arg(identifier arg, expr_ty annotation, string type_comment, int
lineno, int col_offset, int end_lineno, int end_col_offset,
diff --git a/Include/code.h b/Include/code.h
index 2e661e8b36b7f7..933de97d078282 100644
--- a/Include/code.h
+++ b/Include/code.h
@@ -21,6 +21,7 @@ typedef uint16_t _Py_CODEUNIT;
typedef struct {
PyObject_HEAD
int co_argcount; /* #arguments, except *args */
+ int co_posonlyargcount; /* #positional only arguments */
int co_kwonlyargcount; /* #keyword only arguments */
int co_nlocals; /* #local variables */
int co_stacksize; /* #entries needed for evaluation stack */
@@ -102,7 +103,7 @@ PyAPI_DATA(PyTypeObject) PyCode_Type;
/* Public interface */
PyAPI_FUNC(PyCodeObject *) PyCode_New(
- int, int, int, int, int, PyObject *, PyObject *,
+ int, int, int, int, int, int, PyObject *, PyObject *,
PyObject *, PyObject *, PyObject *, PyObject *,
PyObject *, PyObject *, int, PyObject *);
/* same as struct above */
diff --git a/Include/cpython/coreconfig.h b/Include/cpython/coreconfig.h
index 7ce1a02e16c635..47a6baa1118fca 100644
--- a/Include/cpython/coreconfig.h
+++ b/Include/cpython/coreconfig.h
@@ -8,10 +8,18 @@ extern "C" {
/* --- _PyInitError ----------------------------------------------- */
typedef struct {
- const char *prefix;
- const char *msg;
- int user_err;
+ enum {
+ _Py_INIT_ERR_TYPE_OK=0,
+ _Py_INIT_ERR_TYPE_ERROR=1,
+ _Py_INIT_ERR_TYPE_EXIT=2
+ } _type;
+ const char *_func;
+ const char *err_msg;
+#ifdef MS_WINDOWS
+ unsigned int exitcode;
+#else
int exitcode;
+#endif
} _PyInitError;
/* Almost all errors causing Python initialization to fail */
@@ -23,18 +31,25 @@ typedef struct {
#endif
#define _Py_INIT_OK() \
- (_PyInitError){.prefix = NULL, .msg = NULL, .user_err = 0, .exitcode = -1}
-#define _Py_INIT_ERR(MSG) \
- (_PyInitError){.prefix = _Py_INIT_GET_FUNC(), .msg = (MSG), .user_err = 0, .exitcode = -1}
-/* Error that can be fixed by the user like invalid input parameter.
- Don't abort() the process on such error. */
-#define _Py_INIT_USER_ERR(MSG) \
- (_PyInitError){.prefix = _Py_INIT_GET_FUNC(), .msg = (MSG), .user_err = 1, .exitcode = -1}
-#define _Py_INIT_NO_MEMORY() _Py_INIT_USER_ERR("memory allocation failed")
+ (_PyInitError){._type = _Py_INIT_ERR_TYPE_OK,}
+ /* other fields are set to 0 */
+#define _Py_INIT_ERR(ERR_MSG) \
+ (_PyInitError){ \
+ ._type = _Py_INIT_ERR_TYPE_ERROR, \
+ ._func = _Py_INIT_GET_FUNC(), \
+ .err_msg = (ERR_MSG)}
+ /* other fields are set to 0 */
+#define _Py_INIT_NO_MEMORY() _Py_INIT_ERR("memory allocation failed")
#define _Py_INIT_EXIT(EXITCODE) \
- (_PyInitError){.prefix = NULL, .msg = NULL, .user_err = 0, .exitcode = (EXITCODE)}
+ (_PyInitError){ \
+ ._type = _Py_INIT_ERR_TYPE_EXIT, \
+ .exitcode = (EXITCODE)}
+#define _Py_INIT_IS_ERROR(err) \
+ (err._type == _Py_INIT_ERR_TYPE_ERROR)
+#define _Py_INIT_IS_EXIT(err) \
+ (err._type == _Py_INIT_ERR_TYPE_EXIT)
#define _Py_INIT_FAILED(err) \
- (err.msg != NULL || err.exitcode != -1)
+ (err._type != _Py_INIT_ERR_TYPE_OK)
/* --- _PyWstrList ------------------------------------------------ */
@@ -50,7 +65,12 @@ typedef struct {
/* --- _PyPreConfig ----------------------------------------------- */
+#define _Py_CONFIG_VERSION 1
+
typedef struct {
+ int _config_version; /* Internal configuration version,
+ used for ABI compatibility */
+
/* If greater than 0, enable isolated mode: sys.path contains
neither the script's directory nor the user's site-packages directory.
@@ -117,6 +137,7 @@ typedef struct {
#define _PyPreConfig_INIT \
(_PyPreConfig){ \
_PyPreConfig_WINDOWS_INIT \
+ ._config_version = _Py_CONFIG_VERSION, \
.isolated = -1, \
.use_environment = -1, \
.dev_mode = -1, \
@@ -126,9 +147,12 @@ typedef struct {
/* --- _PyCoreConfig ---------------------------------------------- */
typedef struct {
- int isolated;
- int use_environment;
- int dev_mode;
+ int _config_version; /* Internal configuration version,
+ used for ABI compatibility */
+
+ int isolated; /* Isolated mode? see _PyPreConfig.isolated */
+ int use_environment; /* Use environment variables? see _PyPreConfig.use_environment */
+ int dev_mode; /* Development mode? See _PyPreConfig.dev_mode */
/* Install signal handlers? Yes by default. */
int install_signal_handlers;
@@ -183,8 +207,8 @@ typedef struct {
See Py_FileSystemDefaultEncoding and Py_FileSystemDefaultEncodeErrors.
*/
- char *filesystem_encoding;
- char *filesystem_errors;
+ wchar_t *filesystem_encoding;
+ wchar_t *filesystem_errors;
wchar_t *pycache_prefix; /* PYTHONPYCACHEPREFIX, -X pycache_prefix=PATH */
wchar_t *program_name; /* Program name, see also Py_GetProgramName() */
@@ -310,13 +334,13 @@ typedef struct {
Value set from PYTHONIOENCODING environment variable and
Py_SetStandardStreamEncoding() function.
See also 'stdio_errors' attribute. */
- char *stdio_encoding;
+ wchar_t *stdio_encoding;
/* Error handler of sys.stdin and sys.stdout.
Value set from PYTHONIOENCODING environment variable and
Py_SetStandardStreamEncoding() function.
See also 'stdio_encoding' attribute. */
- char *stdio_errors;
+ wchar_t *stdio_errors;
#ifdef MS_WINDOWS
/* If greater than zero, use io.FileIO instead of WindowsConsoleIO for sys
@@ -348,7 +372,7 @@ typedef struct {
Needed by freeze_importlib. */
int _install_importlib;
- /* Value of the --check-hash-based-pycs configure option. Valid values:
+ /* Value of the --check-hash-based-pycs command line option:
- "default" means the 'check_source' flag in hash-based pycs
determines invalidation
@@ -357,20 +381,16 @@ typedef struct {
- "never" causes the interpreter to always assume hash-based pycs are
valid
- Set by the --check-hash-based-pycs command line option.
The default value is "default".
See PEP 552 "Deterministic pycs" for more details. */
- const char *_check_hash_pycs_mode;
+ wchar_t *check_hash_pycs_mode;
/* If greater than 0, suppress _PyPathConfig_Calculate() warnings.
If set to -1 (default), inherit Py_FrozenFlag value. */
int _frozen;
- /* If non-zero, use "main" Python initialization */
- int _init_main;
-
} _PyCoreConfig;
#ifdef MS_WINDOWS
@@ -383,6 +403,7 @@ typedef struct {
#define _PyCoreConfig_INIT \
(_PyCoreConfig){ \
_PyCoreConfig_WINDOWS_INIT \
+ ._config_version = _Py_CONFIG_VERSION, \
.isolated = -1, \
.use_environment = -1, \
.dev_mode = -1, \
@@ -403,16 +424,10 @@ typedef struct {
.user_site_directory = -1, \
.buffered_stdio = -1, \
._install_importlib = 1, \
- ._check_hash_pycs_mode = "default", \
- ._frozen = -1, \
- ._init_main = 1}
+ .check_hash_pycs_mode = NULL, \
+ ._frozen = -1}
/* Note: _PyCoreConfig_INIT sets other fields to 0/NULL */
-
-/* --- Function used for testing ---------------------------------- */
-
-PyAPI_FUNC(PyObject*) _Py_GetConfigsAsDict(void);
-
#ifdef __cplusplus
}
#endif
diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h
index a0953f03261dc3..94331f35e1bd43 100644
--- a/Include/cpython/pystate.h
+++ b/Include/cpython/pystate.h
@@ -155,9 +155,6 @@ PyAPI_FUNC(PyInterpreterState *) _PyInterpreterState_Get(void);
PyAPI_FUNC(int) _PyState_AddModule(PyObject*, struct PyModuleDef*);
PyAPI_FUNC(void) _PyState_ClearModules(void);
PyAPI_FUNC(PyThreadState *) _PyThreadState_Prealloc(PyInterpreterState *);
-PyAPI_FUNC(void) _PyThreadState_Init(PyThreadState *);
-PyAPI_FUNC(void) _PyThreadState_DeleteExcept(PyThreadState *tstate);
-PyAPI_FUNC(void) _PyGILState_Reinit(void);
/* Similar to PyThreadState_Get(), but don't issue a fatal error
* if it is NULL. */
diff --git a/Include/grammar.h b/Include/grammar.h
index 7a6182bb76d57c..4b66b1e9b97451 100644
--- a/Include/grammar.h
+++ b/Include/grammar.h
@@ -13,7 +13,7 @@ extern "C" {
typedef struct {
int lb_type;
- char *lb_str;
+ const char *lb_str;
} label;
#define EMPTY 0 /* Label number 0 is by definition the empty label */
@@ -22,7 +22,7 @@ typedef struct {
typedef struct {
int ll_nlabels;
- label *ll_label;
+ const label *ll_label;
} labellist;
/* An arc from one state to another */
@@ -36,7 +36,7 @@ typedef struct {
typedef struct {
int s_narcs;
- arc *s_arc; /* Array of arcs */
+ const arc *s_arc; /* Array of arcs */
/* Optional accelerators */
int s_lower; /* Lowest label index */
@@ -59,14 +59,14 @@ typedef struct {
typedef struct {
int g_ndfas;
- dfa *g_dfa; /* Array of DFAs */
- labellist g_ll;
+ const dfa *g_dfa; /* Array of DFAs */
+ const labellist g_ll;
int g_start; /* Start symbol of the grammar */
int g_accel; /* Set if accelerators present */
} grammar;
/* FUNCTIONS */
-dfa *PyGrammar_FindDFA(grammar *g, int type);
+const dfa *PyGrammar_FindDFA(grammar *g, int type);
const char *PyGrammar_LabelRepr(label *lb);
void PyGrammar_AddAccelerators(grammar *g);
void PyGrammar_RemoveAccelerators(grammar *);
diff --git a/Include/internal/pycore_accu.h b/Include/internal/pycore_accu.h
index 4350db58a26905..d346222e4dd0c9 100644
--- a/Include/internal/pycore_accu.h
+++ b/Include/internal/pycore_accu.h
@@ -9,8 +9,8 @@ extern "C" {
*** Its definition may be changed or removed at any moment.
***/
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
/*
diff --git a/Include/internal/pycore_atomic.h b/Include/internal/pycore_atomic.h
index 7aa7eed6f7c20c..336bc3fec27e5d 100644
--- a/Include/internal/pycore_atomic.h
+++ b/Include/internal/pycore_atomic.h
@@ -4,8 +4,8 @@
extern "C" {
#endif
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
#include "dynamic_annotations.h"
@@ -261,13 +261,13 @@ typedef struct _Py_atomic_int {
#define _Py_atomic_store_64bit(ATOMIC_VAL, NEW_VAL, ORDER) \
switch (ORDER) { \
case _Py_memory_order_acquire: \
- _InterlockedExchange64_HLEAcquire((__int64 volatile*)ATOMIC_VAL, (__int64)NEW_VAL); \
+ _InterlockedExchange64_HLEAcquire((__int64 volatile*)&((ATOMIC_VAL)->_value), (__int64)(NEW_VAL)); \
break; \
case _Py_memory_order_release: \
- _InterlockedExchange64_HLERelease((__int64 volatile*)ATOMIC_VAL, (__int64)NEW_VAL); \
+ _InterlockedExchange64_HLERelease((__int64 volatile*)&((ATOMIC_VAL)->_value), (__int64)(NEW_VAL)); \
break; \
default: \
- _InterlockedExchange64((__int64 volatile*)ATOMIC_VAL, (__int64)NEW_VAL); \
+ _InterlockedExchange64((__int64 volatile*)&((ATOMIC_VAL)->_value), (__int64)(NEW_VAL)); \
break; \
}
#else
@@ -277,13 +277,13 @@ typedef struct _Py_atomic_int {
#define _Py_atomic_store_32bit(ATOMIC_VAL, NEW_VAL, ORDER) \
switch (ORDER) { \
case _Py_memory_order_acquire: \
- _InterlockedExchange_HLEAcquire((volatile long*)ATOMIC_VAL, (int)NEW_VAL); \
+ _InterlockedExchange_HLEAcquire((volatile long*)&((ATOMIC_VAL)->_value), (int)(NEW_VAL)); \
break; \
case _Py_memory_order_release: \
- _InterlockedExchange_HLERelease((volatile long*)ATOMIC_VAL, (int)NEW_VAL); \
+ _InterlockedExchange_HLERelease((volatile long*)&((ATOMIC_VAL)->_value), (int)(NEW_VAL)); \
break; \
default: \
- _InterlockedExchange((volatile long*)ATOMIC_VAL, (int)NEW_VAL); \
+ _InterlockedExchange((volatile long*)&((ATOMIC_VAL)->_value), (int)(NEW_VAL)); \
break; \
}
@@ -292,7 +292,7 @@ typedef struct _Py_atomic_int {
gil_created() uses -1 as a sentinel value, if this returns
a uintptr_t it will do an unsigned compare and crash
*/
-inline intptr_t _Py_atomic_load_64bit(volatile uintptr_t* value, int order) {
+inline intptr_t _Py_atomic_load_64bit_impl(volatile uintptr_t* value, int order) {
__int64 old;
switch (order) {
case _Py_memory_order_acquire:
@@ -323,11 +323,14 @@ inline intptr_t _Py_atomic_load_64bit(volatile uintptr_t* value, int order) {
return old;
}
+#define _Py_atomic_load_64bit(ATOMIC_VAL, ORDER) \
+ _Py_atomic_load_64bit_impl((volatile uintptr_t*)&((ATOMIC_VAL)->_value), (ORDER))
+
#else
-#define _Py_atomic_load_64bit(ATOMIC_VAL, ORDER) *(ATOMIC_VAL)
+#define _Py_atomic_load_64bit(ATOMIC_VAL, ORDER) ((ATOMIC_VAL)->_value)
#endif
-inline int _Py_atomic_load_32bit(volatile int* value, int order) {
+inline int _Py_atomic_load_32bit_impl(volatile int* value, int order) {
long old;
switch (order) {
case _Py_memory_order_acquire:
@@ -358,16 +361,19 @@ inline int _Py_atomic_load_32bit(volatile int* value, int order) {
return old;
}
+#define _Py_atomic_load_32bit(ATOMIC_VAL, ORDER) \
+ _Py_atomic_load_32bit_impl((volatile int*)&((ATOMIC_VAL)->_value), (ORDER))
+
#define _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, ORDER) \
if (sizeof((ATOMIC_VAL)->_value) == 8) { \
- _Py_atomic_store_64bit((volatile long long*)&((ATOMIC_VAL)->_value), NEW_VAL, ORDER) } else { \
- _Py_atomic_store_32bit((volatile long*)&((ATOMIC_VAL)->_value), NEW_VAL, ORDER) }
+ _Py_atomic_store_64bit((ATOMIC_VAL), NEW_VAL, ORDER) } else { \
+ _Py_atomic_store_32bit((ATOMIC_VAL), NEW_VAL, ORDER) }
#define _Py_atomic_load_explicit(ATOMIC_VAL, ORDER) \
( \
sizeof((ATOMIC_VAL)->_value) == 8 ? \
- _Py_atomic_load_64bit((volatile long long*)&((ATOMIC_VAL)->_value), ORDER) : \
- _Py_atomic_load_32bit((volatile long*)&((ATOMIC_VAL)->_value), ORDER) \
+ _Py_atomic_load_64bit((ATOMIC_VAL), ORDER) : \
+ _Py_atomic_load_32bit((ATOMIC_VAL), ORDER) \
)
#elif defined(_M_ARM) || defined(_M_ARM64)
typedef enum _Py_memory_order {
@@ -422,7 +428,7 @@ typedef struct _Py_atomic_int {
gil_created() uses -1 as a sentinel value, if this returns
a uintptr_t it will do an unsigned compare and crash
*/
-inline intptr_t _Py_atomic_load_64bit(volatile uintptr_t* value, int order) {
+inline intptr_t _Py_atomic_load_64bit_impl(volatile uintptr_t* value, int order) {
uintptr_t old;
switch (order) {
case _Py_memory_order_acquire:
@@ -453,11 +459,14 @@ inline intptr_t _Py_atomic_load_64bit(volatile uintptr_t* value, int order) {
return old;
}
+#define _Py_atomic_load_64bit(ATOMIC_VAL, ORDER) \
+ _Py_atomic_load_64bit_impl((volatile uintptr_t*)&((ATOMIC_VAL)->_value), (ORDER))
+
#else
-#define _Py_atomic_load_64bit(ATOMIC_VAL, ORDER) *(ATOMIC_VAL)
+#define _Py_atomic_load_64bit(ATOMIC_VAL, ORDER) ((ATOMIC_VAL)->_value)
#endif
-inline int _Py_atomic_load_32bit(volatile int* value, int order) {
+inline int _Py_atomic_load_32bit_impl(volatile int* value, int order) {
int old;
switch (order) {
case _Py_memory_order_acquire:
@@ -488,16 +497,19 @@ inline int _Py_atomic_load_32bit(volatile int* value, int order) {
return old;
}
+#define _Py_atomic_load_32bit(ATOMIC_VAL, ORDER) \
+ _Py_atomic_load_32bit_impl((volatile int*)&((ATOMIC_VAL)->_value), (ORDER))
+
#define _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, ORDER) \
if (sizeof((ATOMIC_VAL)->_value) == 8) { \
- _Py_atomic_store_64bit(&((ATOMIC_VAL)->_value), NEW_VAL, ORDER) } else { \
- _Py_atomic_store_32bit(&((ATOMIC_VAL)->_value), NEW_VAL, ORDER) }
+ _Py_atomic_store_64bit((ATOMIC_VAL), (NEW_VAL), (ORDER)) } else { \
+ _Py_atomic_store_32bit((ATOMIC_VAL), (NEW_VAL), (ORDER)) }
#define _Py_atomic_load_explicit(ATOMIC_VAL, ORDER) \
( \
sizeof((ATOMIC_VAL)->_value) == 8 ? \
- _Py_atomic_load_64bit(&((ATOMIC_VAL)->_value), ORDER) : \
- _Py_atomic_load_32bit(&((ATOMIC_VAL)->_value), ORDER) \
+ _Py_atomic_load_64bit((ATOMIC_VAL), (ORDER)) : \
+ _Py_atomic_load_32bit((ATOMIC_VAL), (ORDER)) \
)
#endif
#else /* !gcc x86 !_msc_ver */
@@ -529,16 +541,16 @@ typedef struct _Py_atomic_int {
/* Standardized shortcuts. */
#define _Py_atomic_store(ATOMIC_VAL, NEW_VAL) \
- _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, _Py_memory_order_seq_cst)
+ _Py_atomic_store_explicit((ATOMIC_VAL), (NEW_VAL), _Py_memory_order_seq_cst)
#define _Py_atomic_load(ATOMIC_VAL) \
- _Py_atomic_load_explicit(ATOMIC_VAL, _Py_memory_order_seq_cst)
+ _Py_atomic_load_explicit((ATOMIC_VAL), _Py_memory_order_seq_cst)
/* Python-local extensions */
#define _Py_atomic_store_relaxed(ATOMIC_VAL, NEW_VAL) \
- _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, _Py_memory_order_relaxed)
+ _Py_atomic_store_explicit((ATOMIC_VAL), (NEW_VAL), _Py_memory_order_relaxed)
#define _Py_atomic_load_relaxed(ATOMIC_VAL) \
- _Py_atomic_load_explicit(ATOMIC_VAL, _Py_memory_order_relaxed)
+ _Py_atomic_load_explicit((ATOMIC_VAL), _Py_memory_order_relaxed)
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h
index 2ead96c7abe32a..0bb19f1aa3b642 100644
--- a/Include/internal/pycore_ceval.h
+++ b/Include/internal/pycore_ceval.h
@@ -4,8 +4,8 @@
extern "C" {
#endif
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
#include "pycore_atomic.h"
diff --git a/Include/internal/pycore_condvar.h b/Include/internal/pycore_condvar.h
index a12b6994ad55cc..8b89d709510a33 100644
--- a/Include/internal/pycore_condvar.h
+++ b/Include/internal/pycore_condvar.h
@@ -1,8 +1,8 @@
#ifndef Py_INTERNAL_CONDVAR_H
#define Py_INTERNAL_CONDVAR_H
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
#ifndef _POSIX_THREADS
diff --git a/Include/internal/pycore_context.h b/Include/internal/pycore_context.h
index 70701cdd11dc61..5e1ba0d0393f4a 100644
--- a/Include/internal/pycore_context.h
+++ b/Include/internal/pycore_context.h
@@ -1,8 +1,8 @@
#ifndef Py_INTERNAL_CONTEXT_H
#define Py_INTERNAL_CONTEXT_H
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
#include "pycore_hamt.h"
diff --git a/Include/internal/pycore_coreconfig.h b/Include/internal/pycore_coreconfig.h
index 3a27628aa7408b..d48904e482a45d 100644
--- a/Include/internal/pycore_coreconfig.h
+++ b/Include/internal/pycore_coreconfig.h
@@ -4,10 +4,12 @@
extern "C" {
#endif
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN defined"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
+#include "pycore_pystate.h" /* _PyRuntimeState */
+
/* --- _PyWstrList ------------------------------------------------ */
@@ -100,15 +102,36 @@ PyAPI_FUNC(_PyInitError) _PyPreConfig_Write(_PyPreConfig *config);
/* --- _PyCoreConfig ---------------------------------------------- */
PyAPI_FUNC(void) _PyCoreConfig_Clear(_PyCoreConfig *);
-PyAPI_FUNC(int) _PyCoreConfig_Copy(
+PyAPI_FUNC(_PyInitError) _PyCoreConfig_Copy(
_PyCoreConfig *config,
const _PyCoreConfig *config2);
+PyAPI_FUNC(_PyInitError) _PyCoreConfig_SetString(
+ wchar_t **config_str,
+ const wchar_t *str);
+PyAPI_FUNC(_PyInitError) _PyCoreConfig_DecodeLocale(
+ wchar_t **config_str,
+ const char *str);
PyAPI_FUNC(_PyInitError) _PyCoreConfig_InitPathConfig(_PyCoreConfig *config);
PyAPI_FUNC(_PyInitError) _PyCoreConfig_SetPathConfig(
const _PyCoreConfig *config);
-PyAPI_FUNC(_PyInitError) _PyCoreConfig_Read(_PyCoreConfig *config,
+PyAPI_FUNC(_PyInitError) _PyCoreConfig_Read(_PyCoreConfig *config);
+PyAPI_FUNC(void) _PyCoreConfig_Write(const _PyCoreConfig *config,
+ _PyRuntimeState *runtime);
+PyAPI_FUNC(_PyInitError) _PyCoreConfig_SetPyArgv(
+ _PyCoreConfig *config,
const _PyArgv *args);
-PyAPI_FUNC(void) _PyCoreConfig_Write(const _PyCoreConfig *config);
+PyAPI_FUNC(_PyInitError) _PyCoreConfig_SetArgv(
+ _PyCoreConfig *config,
+ int argc,
+ char **argv);
+PyAPI_FUNC(_PyInitError) _PyCoreConfig_SetWideArgv(_PyCoreConfig *config,
+ int argc,
+ wchar_t **argv);
+
+
+/* --- Function used for testing ---------------------------------- */
+
+PyAPI_FUNC(PyObject*) _Py_GetConfigsAsDict(void);
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_getopt.h b/Include/internal/pycore_getopt.h
index 0d1897c75a6403..834b8c8a14092f 100644
--- a/Include/internal/pycore_getopt.h
+++ b/Include/internal/pycore_getopt.h
@@ -1,8 +1,8 @@
#ifndef Py_INTERNAL_PYGETOPT_H
#define Py_INTERNAL_PYGETOPT_H
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
extern int _PyOS_opterr;
diff --git a/Include/internal/pycore_gil.h b/Include/internal/pycore_gil.h
index 014e75fd182f18..7de316397b15e8 100644
--- a/Include/internal/pycore_gil.h
+++ b/Include/internal/pycore_gil.h
@@ -4,8 +4,8 @@
extern "C" {
#endif
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
#include "pycore_condvar.h"
diff --git a/Include/internal/pycore_hamt.h b/Include/internal/pycore_hamt.h
index 8b2ce1fc96c346..e65aef5e21a954 100644
--- a/Include/internal/pycore_hamt.h
+++ b/Include/internal/pycore_hamt.h
@@ -1,8 +1,8 @@
#ifndef Py_INTERNAL_HAMT_H
#define Py_INTERNAL_HAMT_H
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
#define _Py_HAMT_MAX_TREE_DEPTH 7
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index c95595358a9e84..81548f819198e3 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -4,8 +4,8 @@
extern "C" {
#endif
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN defined"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
#include "pycore_pystate.h" /* _PyRuntime */
diff --git a/Include/internal/pycore_pathconfig.h b/Include/internal/pycore_pathconfig.h
index 80d86a0dd1b54c..9eb8e88df76736 100644
--- a/Include/internal/pycore_pathconfig.h
+++ b/Include/internal/pycore_pathconfig.h
@@ -4,8 +4,8 @@
extern "C" {
#endif
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
typedef struct _PyPathConfig {
diff --git a/Include/internal/pycore_pyhash.h b/Include/internal/pycore_pyhash.h
index babbc95b879e35..a229f8d8b7f0a2 100644
--- a/Include/internal/pycore_pyhash.h
+++ b/Include/internal/pycore_pyhash.h
@@ -1,8 +1,8 @@
#ifndef Py_INTERNAL_HASH_H
#define Py_INTERNAL_HASH_H
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
uint64_t _Py_KeyedHash(uint64_t, const char *, Py_ssize_t);
diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h
index d837ea4fb33a51..adb1f5d90a5960 100644
--- a/Include/internal/pycore_pylifecycle.h
+++ b/Include/internal/pycore_pylifecycle.h
@@ -4,20 +4,27 @@
extern "C" {
#endif
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
+#include "pycore_coreconfig.h" /* _PyArgv */
+#include "pycore_pystate.h" /* _PyRuntimeState */
+
/* True if the main interpreter thread exited due to an unhandled
* KeyboardInterrupt exception, suggesting the user pressed ^C. */
PyAPI_DATA(int) _Py_UnhandledKeyboardInterrupt;
PyAPI_FUNC(int) _Py_UnixMain(int argc, char **argv);
-PyAPI_FUNC(int) _Py_SetFileSystemEncoding(
+extern int _Py_SetFileSystemEncoding(
const char *encoding,
const char *errors);
-PyAPI_FUNC(void) _Py_ClearFileSystemEncoding(void);
+extern void _Py_ClearFileSystemEncoding(void);
+extern _PyInitError _PyUnicode_InitEncodings(PyInterpreterState *interp);
+#ifdef MS_WINDOWS
+extern int _PyUnicode_EnableLegacyWindowsFSEncoding(void);
+#endif
PyAPI_FUNC(void) _Py_ClearStandardStreamEncoding(void);
@@ -32,10 +39,13 @@ extern _PyInitError _PyFaulthandler_Init(int enable);
extern int _PyTraceMalloc_Init(int enable);
extern PyObject * _PyBuiltin_Init(void);
extern _PyInitError _PySys_Create(
+ _PyRuntimeState *runtime,
PyInterpreterState *interp,
PyObject **sysmod_p);
extern _PyInitError _PySys_SetPreliminaryStderr(PyObject *sysdict);
-extern int _PySys_InitMain(PyInterpreterState *interp);
+extern int _PySys_InitMain(
+ _PyRuntimeState *runtime,
+ PyInterpreterState *interp);
extern _PyInitError _PyImport_Init(PyInterpreterState *interp);
extern _PyInitError _PyExc_Init(void);
extern _PyInitError _PyBuiltins_AddExceptions(PyObject * bltinmod);
@@ -63,7 +73,7 @@ extern void PyAsyncGen_Fini(void);
extern void _PyExc_Fini(void);
extern void _PyImport_Fini(void);
extern void _PyImport_Fini2(void);
-extern void _PyGC_Fini(void);
+extern void _PyGC_Fini(_PyRuntimeState *runtime);
extern void _PyType_Fini(void);
extern void _Py_HashRandomization_Fini(void);
extern void _PyUnicode_Fini(void);
@@ -71,14 +81,22 @@ extern void PyLong_Fini(void);
extern void _PyFaulthandler_Fini(void);
extern void _PyHash_Fini(void);
extern int _PyTraceMalloc_Fini(void);
+extern void _PyWarnings_Fini(_PyRuntimeState *runtime);
-extern void _PyGILState_Init(PyInterpreterState *, PyThreadState *);
-extern void _PyGILState_Fini(void);
+extern void _PyGILState_Init(
+ _PyRuntimeState *runtime,
+ PyInterpreterState *interp,
+ PyThreadState *tstate);
+extern void _PyGILState_Fini(_PyRuntimeState *runtime);
-PyAPI_FUNC(void) _PyGC_DumpShutdownStats(void);
+PyAPI_FUNC(void) _PyGC_DumpShutdownStats(_PyRuntimeState *runtime);
+PyAPI_FUNC(_PyInitError) _Py_PreInitializeFromPyArgv(
+ const _PyPreConfig *src_config,
+ const _PyArgv *args);
PyAPI_FUNC(_PyInitError) _Py_PreInitializeFromCoreConfig(
- const _PyCoreConfig *coreconfig);
+ const _PyCoreConfig *coreconfig,
+ const _PyArgv *args);
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h
index 8da1bd9e304ac6..20f3b5e40067c6 100644
--- a/Include/internal/pycore_pymem.h
+++ b/Include/internal/pycore_pymem.h
@@ -4,8 +4,8 @@
extern "C" {
#endif
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN defined"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
#include "objimpl.h"
diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h
index df3730f8014ae4..67bcd147e2829f 100644
--- a/Include/internal/pycore_pystate.h
+++ b/Include/internal/pycore_pystate.h
@@ -4,8 +4,8 @@
extern "C" {
#endif
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
#include "cpython/coreconfig.h"
@@ -56,7 +56,14 @@ struct _is {
PyObject *codec_search_cache;
PyObject *codec_error_registry;
int codecs_initialized;
- int fscodec_initialized;
+
+ /* fs_codec.encoding is initialized to NULL.
+ Later, it is set to a non-NULL string by _PyUnicode_InitEncodings(). */
+ struct {
+ char *encoding; /* Filesystem encoding (encoded to UTF-8) */
+ char *errors; /* Filesystem errors (encoded to UTF-8) */
+ _Py_error_handler error_handler;
+ } fs_codec;
_PyCoreConfig core_config;
#ifdef HAVE_DLOPEN
@@ -185,9 +192,9 @@ typedef struct pyruntimestate {
/* Note: _PyRuntimeState_INIT sets other fields to 0/NULL */
PyAPI_DATA(_PyRuntimeState) _PyRuntime;
-PyAPI_FUNC(_PyInitError) _PyRuntimeState_Init(_PyRuntimeState *);
-PyAPI_FUNC(void) _PyRuntimeState_Fini(_PyRuntimeState *);
-PyAPI_FUNC(void) _PyRuntimeState_ReInitThreads(void);
+PyAPI_FUNC(_PyInitError) _PyRuntimeState_Init(_PyRuntimeState *runtime);
+PyAPI_FUNC(void) _PyRuntimeState_Fini(_PyRuntimeState *runtime);
+PyAPI_FUNC(void) _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime);
/* Initialize _PyRuntimeState.
Return NULL on success, or return an error message on failure. */
@@ -231,8 +238,15 @@ PyAPI_FUNC(void) _PyRuntime_Finalize(void);
/* Other */
-PyAPI_FUNC(_PyInitError) _PyInterpreterState_Enable(_PyRuntimeState *);
-PyAPI_FUNC(void) _PyInterpreterState_DeleteExceptMain(void);
+PyAPI_FUNC(void) _PyThreadState_Init(
+ _PyRuntimeState *runtime,
+ PyThreadState *tstate);
+PyAPI_FUNC(void) _PyThreadState_DeleteExcept(PyThreadState *tstate);
+
+PyAPI_FUNC(_PyInitError) _PyInterpreterState_Enable(_PyRuntimeState *runtime);
+PyAPI_FUNC(void) _PyInterpreterState_DeleteExceptMain(_PyRuntimeState *runtime);
+
+PyAPI_FUNC(void) _PyGILState_Reinit(_PyRuntimeState *runtime);
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_tupleobject.h b/Include/internal/pycore_tupleobject.h
index d0c5b620d3561e..9fcfc5c6ec7196 100644
--- a/Include/internal/pycore_tupleobject.h
+++ b/Include/internal/pycore_tupleobject.h
@@ -4,8 +4,8 @@
extern "C" {
#endif
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
#include "tupleobject.h"
diff --git a/Include/internal/pycore_warnings.h b/Include/internal/pycore_warnings.h
index 91bf90232f5c79..73e5350aff1451 100644
--- a/Include/internal/pycore_warnings.h
+++ b/Include/internal/pycore_warnings.h
@@ -4,8 +4,8 @@
extern "C" {
#endif
-#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN)
-# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define"
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
#endif
#include "object.h"
diff --git a/Include/object.h b/Include/object.h
index 86cbfc581ed181..13e88a6dc6f02a 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -54,13 +54,8 @@ A standard interface exists for objects that contain an array of items
whose size is determined when the object is allocated.
*/
-/* Py_DEBUG implies Py_TRACE_REFS. */
-#if defined(Py_DEBUG) && !defined(Py_TRACE_REFS)
-#define Py_TRACE_REFS
-#endif
-
-/* Py_TRACE_REFS implies Py_REF_DEBUG. */
-#if defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
+/* Py_DEBUG implies Py_REF_DEBUG. */
+#if defined(Py_DEBUG) && !defined(Py_REF_DEBUG)
#define Py_REF_DEBUG
#endif
diff --git a/Include/patchlevel.h b/Include/patchlevel.h
index 497dda046ae9a5..da787f27cc88a7 100644
--- a/Include/patchlevel.h
+++ b/Include/patchlevel.h
@@ -20,10 +20,10 @@
#define PY_MINOR_VERSION 8
#define PY_MICRO_VERSION 0
#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA
-#define PY_RELEASE_SERIAL 3
+#define PY_RELEASE_SERIAL 4
/* Version as a string */
-#define PY_VERSION "3.8.0a3+"
+#define PY_VERSION "3.8.0a4+"
/*--end constants--*/
/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
diff --git a/Include/pyerrors.h b/Include/pyerrors.h
index 5c6751868df489..94af3cb3420ec6 100644
--- a/Include/pyerrors.h
+++ b/Include/pyerrors.h
@@ -21,17 +21,6 @@ PyAPI_FUNC(void) PyErr_GetExcInfo(PyObject **, PyObject **, PyObject **);
PyAPI_FUNC(void) PyErr_SetExcInfo(PyObject *, PyObject *, PyObject *);
#endif
-#if defined(__clang__) || \
- (defined(__GNUC__) && \
- ((__GNUC__ >= 3) || \
- (__GNUC__ == 2) && (__GNUC_MINOR__ >= 5)))
-# define _Py_NO_RETURN __attribute__((__noreturn__))
-#elif defined(_MSC_VER)
-# define _Py_NO_RETURN __declspec(noreturn)
-#else
-# define _Py_NO_RETURN
-#endif
-
/* Defined in Python/pylifecycle.c */
PyAPI_FUNC(void) _Py_NO_RETURN Py_FatalError(const char *message);
diff --git a/Include/pymacro.h b/Include/pymacro.h
index 3f6ddbe9977ace..546f9c6e702025 100644
--- a/Include/pymacro.h
+++ b/Include/pymacro.h
@@ -67,7 +67,7 @@
/* Define macros for inline documentation. */
-#define PyDoc_VAR(name) static char name[]
+#define PyDoc_VAR(name) static const char name[]
#define PyDoc_STRVAR(name,str) PyDoc_VAR(name) = PyDoc_STR(str)
#ifdef WITH_DOC_STRINGS
#define PyDoc_STR(str) str
diff --git a/Include/pyport.h b/Include/pyport.h
index 4971a493ccee22..ab88a9ac5c529e 100644
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -5,6 +5,27 @@
#include <inttypes.h>
+
+/* Defines to build Python and its standard library:
+ *
+ * - Py_BUILD_CORE: Build Python core. Give access to Python internals, but
+ * should not be used by third-party modules.
+ * - Py_BUILD_CORE_BUILTIN: Build a Python stdlib module as a built-in module.
+ * - Py_BUILD_CORE_MODULE: Build a Python stdlib module as a dynamic library.
+ *
+ * Py_BUILD_CORE_BUILTIN and Py_BUILD_CORE_MODULE imply Py_BUILD_CORE.
+ *
+ * On Windows, Py_BUILD_CORE_MODULE exports "PyInit_xxx" symbol, whereas
+ * Py_BUILD_CORE_BUILTIN does not.
+ */
+#if defined(Py_BUILD_CORE_BUILTIN) && !defined(Py_BUILD_CORE)
+# define Py_BUILD_CORE
+#endif
+#if defined(Py_BUILD_CORE_MODULE) && !defined(Py_BUILD_CORE)
+# define Py_BUILD_CORE
+#endif
+
+
/**************************************************************************
Symbols and macros to supply platform-independent interfaces to basic
C language & library operations whose spellings vary across platforms.
@@ -406,7 +427,7 @@ extern "C" {
#endif
/* get and set x87 control word for VisualStudio/x86 */
-#if defined(_MSC_VER) && defined(_M_IX86) /* x87 only supported in x86 */
+#if defined(_MSC_VER) && !defined(_WIN64) && !defined(_M_ARM) /* x87 not supported in 64-bit or ARM */
#define HAVE_PY_SET_53BIT_PRECISION 1
#define _Py_SET_53BIT_PRECISION_HEADER \
unsigned int old_387controlword, new_387controlword, out_387controlword
@@ -623,7 +644,7 @@ extern char * _getpty(int *, int, mode_t, int);
/* only get special linkage if built as shared or platform is Cygwin */
#if defined(Py_ENABLE_SHARED) || defined(__CYGWIN__)
# if defined(HAVE_DECLSPEC_DLL)
-# if defined(Py_BUILD_CORE) || defined(Py_BUILD_CORE_BUILTIN)
+# if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
# define PyAPI_FUNC(RTYPE) __declspec(dllexport) RTYPE
# define PyAPI_DATA(RTYPE) extern __declspec(dllexport) RTYPE
/* module init functions inside the core need no external linkage */
@@ -755,7 +776,7 @@ extern char * _getpty(int *, int, mode_t, int);
#define PY_LITTLE_ENDIAN 1
#endif
-#if defined(Py_BUILD_CORE) || defined(Py_BUILD_CORE_BUILTIN)
+#ifdef Py_BUILD_CORE
/*
* Macros to protect CRT calls against instant termination when passed an
* invalid parameter (issue23524).
@@ -776,9 +797,9 @@ extern _invalid_parameter_handler _Py_silent_invalid_parameter_handler;
#endif /* Py_BUILD_CORE */
#ifdef __ANDROID__
-/* The Android langinfo.h header is not used. */
-#undef HAVE_LANGINFO_H
-#undef CODESET
+ /* The Android langinfo.h header is not used. */
+# undef HAVE_LANGINFO_H
+# undef CODESET
#endif
/* Maximum value of the Windows DWORD type */
@@ -789,7 +810,37 @@ extern _invalid_parameter_handler _Py_silent_invalid_parameter_handler;
* for compatibility.
*/
#ifndef WITH_THREAD
-#define WITH_THREAD
+# define WITH_THREAD
+#endif
+
+/* Check that ALT_SOABI is consistent with Py_TRACE_REFS:
+ ./configure --with-trace-refs must be used to define Py_TRACE_REFS */
+#if defined(ALT_SOABI) && defined(Py_TRACE_REFS)
+# error "Py_TRACE_REFS ABI is not compatible with release and debug ABI"
+#endif
+
+#if defined(__ANDROID__) || defined(__VXWORKS__)
+ /* Ignore the locale encoding: force UTF-8 */
+# define _Py_FORCE_UTF8_LOCALE
+#endif
+
+#if defined(_Py_FORCE_UTF8_LOCALE) || defined(__APPLE__)
+ /* Use UTF-8 as filesystem encoding */
+# define _Py_FORCE_UTF8_FS_ENCODING
+#endif
+
+/* Mark a function which cannot return. Example:
+
+ PyAPI_FUNC(void) _Py_NO_RETURN PyThread_exit_thread(void); */
+#if defined(__clang__) || \
+ (defined(__GNUC__) && \
+ ((__GNUC__ >= 3) || \
+ (__GNUC__ == 2) && (__GNUC_MINOR__ >= 5)))
+# define _Py_NO_RETURN __attribute__((__noreturn__))
+#elif defined(_MSC_VER)
+# define _Py_NO_RETURN __declspec(noreturn)
+#else
+# define _Py_NO_RETURN
#endif
#endif /* Py_PYPORT_H */
diff --git a/Include/pythonrun.h b/Include/pythonrun.h
index 6f0c6fc6554379..e83846add981cd 100644
--- a/Include/pythonrun.h
+++ b/Include/pythonrun.h
@@ -165,7 +165,7 @@ PyAPI_DATA(PyThreadState*) _PyOS_ReadlineTState;
to an 8k margin. */
#define PYOS_STACK_MARGIN 2048
-#if defined(WIN32) && !defined(MS_WIN64) && defined(_MSC_VER) && _MSC_VER >= 1300
+#if defined(WIN32) && !defined(MS_WIN64) && !defined(_M_ARM) && defined(_MSC_VER) && _MSC_VER >= 1300
/* Enable stack checking under Microsoft C */
#define USE_STACKCHECK
#endif
diff --git a/Include/pythread.h b/Include/pythread.h
index eb61033b2d9089..bc1d92cd1ff199 100644
--- a/Include/pythread.h
+++ b/Include/pythread.h
@@ -23,7 +23,7 @@ typedef enum PyLockStatus {
PyAPI_FUNC(void) PyThread_init_thread(void);
PyAPI_FUNC(unsigned long) PyThread_start_new_thread(void (*)(void *), void *);
-PyAPI_FUNC(void) PyThread_exit_thread(void);
+PyAPI_FUNC(void) _Py_NO_RETURN PyThread_exit_thread(void);
PyAPI_FUNC(unsigned long) PyThread_get_thread_ident(void);
PyAPI_FUNC(PyThread_type_lock) PyThread_allocate_lock(void);
diff --git a/Lib/_pydecimal.py b/Lib/_pydecimal.py
index 44ea5b41b2a1f6..c14d8ca86a1181 100644
--- a/Lib/_pydecimal.py
+++ b/Lib/_pydecimal.py
@@ -5631,8 +5631,6 @@ def __init__(self, value=None):
def __repr__(self):
return "(%r, %r, %r)" % (self.sign, self.int, self.exp)
- __str__ = __repr__
-
def _normalize(op1, op2, prec = 0):
diff --git a/Lib/_pyio.py b/Lib/_pyio.py
index e868fdc7cbc5c2..af2ce30c278062 100644
--- a/Lib/_pyio.py
+++ b/Lib/_pyio.py
@@ -551,6 +551,11 @@ def readlines(self, hint=None):
return lines
def writelines(self, lines):
+ """Write a list of lines to the stream.
+
+ Line separators are not added, so it is usual for each of the lines
+ provided to have a line separator at the end.
+ """
self._checkClosed()
for line in lines:
self.write(line)
diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py
index 9b4b846131de10..9613ac2a114f0d 100644
--- a/Lib/asyncio/base_events.py
+++ b/Lib/asyncio/base_events.py
@@ -16,6 +16,7 @@
import collections
import collections.abc
import concurrent.futures
+import functools
import heapq
import itertools
import os
@@ -41,6 +42,7 @@
from . import futures
from . import protocols
from . import sslproto
+from . import staggered
from . import tasks
from . import transports
from .log import logger
@@ -159,6 +161,28 @@ def _ipaddr_info(host, port, family, type, proto):
return None
+def _interleave_addrinfos(addrinfos, first_address_family_count=1):
+ """Interleave list of addrinfo tuples by family."""
+ # Group addresses by family
+ addrinfos_by_family = collections.OrderedDict()
+ for addr in addrinfos:
+ family = addr[0]
+ if family not in addrinfos_by_family:
+ addrinfos_by_family[family] = []
+ addrinfos_by_family[family].append(addr)
+ addrinfos_lists = list(addrinfos_by_family.values())
+
+ reordered = []
+ if first_address_family_count > 1:
+ reordered.extend(addrinfos_lists[0][:first_address_family_count - 1])
+ del addrinfos_lists[0][:first_address_family_count - 1]
+ reordered.extend(
+ a for a in itertools.chain.from_iterable(
+ itertools.zip_longest(*addrinfos_lists)
+ ) if a is not None)
+ return reordered
+
+
def _run_until_complete_cb(fut):
if not fut.cancelled():
exc = fut.exception()
@@ -871,12 +895,49 @@ def _check_sendfile_params(self, sock, file, offset, count):
"offset must be a non-negative integer (got {!r})".format(
offset))
+ async def _connect_sock(self, exceptions, addr_info, local_addr_infos=None):
+ """Create, bind and connect one socket."""
+ my_exceptions = []
+ exceptions.append(my_exceptions)
+ family, type_, proto, _, address = addr_info
+ sock = None
+ try:
+ sock = socket.socket(family=family, type=type_, proto=proto)
+ sock.setblocking(False)
+ if local_addr_infos is not None:
+ for _, _, _, _, laddr in local_addr_infos:
+ try:
+ sock.bind(laddr)
+ break
+ except OSError as exc:
+ msg = (
+ f'error while attempting to bind on '
+ f'address {laddr!r}: '
+ f'{exc.strerror.lower()}'
+ )
+ exc = OSError(exc.errno, msg)
+ my_exceptions.append(exc)
+ else: # all bind attempts failed
+ raise my_exceptions.pop()
+ await self.sock_connect(sock, address)
+ return sock
+ except OSError as exc:
+ my_exceptions.append(exc)
+ if sock is not None:
+ sock.close()
+ raise
+ except:
+ if sock is not None:
+ sock.close()
+ raise
+
async def create_connection(
self, protocol_factory, host=None, port=None,
*, ssl=None, family=0,
proto=0, flags=0, sock=None,
local_addr=None, server_hostname=None,
- ssl_handshake_timeout=None):
+ ssl_handshake_timeout=None,
+ happy_eyeballs_delay=None, interleave=None):
"""Connect to a TCP server.
Create a streaming transport connection to a given Internet host and
@@ -911,6 +972,10 @@ async def create_connection(
raise ValueError(
'ssl_handshake_timeout is only meaningful with ssl')
+ if happy_eyeballs_delay is not None and interleave is None:
+ # If using happy eyeballs, default to interleaving addresses by family
+ interleave = 1
+
if host is not None or port is not None:
if sock is not None:
raise ValueError(
@@ -929,43 +994,31 @@ async def create_connection(
flags=flags, loop=self)
if not laddr_infos:
raise OSError('getaddrinfo() returned empty list')
+ else:
+ laddr_infos = None
+
+ if interleave:
+ infos = _interleave_addrinfos(infos, interleave)
exceptions = []
- for family, type, proto, cname, address in infos:
- try:
- sock = socket.socket(family=family, type=type, proto=proto)
- sock.setblocking(False)
- if local_addr is not None:
- for _, _, _, _, laddr in laddr_infos:
- try:
- sock.bind(laddr)
- break
- except OSError as exc:
- msg = (
- f'error while attempting to bind on '
- f'address {laddr!r}: '
- f'{exc.strerror.lower()}'
- )
- exc = OSError(exc.errno, msg)
- exceptions.append(exc)
- else:
- sock.close()
- sock = None
- continue
- if self._debug:
- logger.debug("connect %r to %r", sock, address)
- await self.sock_connect(sock, address)
- except OSError as exc:
- if sock is not None:
- sock.close()
- exceptions.append(exc)
- except:
- if sock is not None:
- sock.close()
- raise
- else:
- break
- else:
+ if happy_eyeballs_delay is None:
+ # not using happy eyeballs
+ for addrinfo in infos:
+ try:
+ sock = await self._connect_sock(
+ exceptions, addrinfo, laddr_infos)
+ break
+ except OSError:
+ continue
+ else: # using happy eyeballs
+ sock, _, _ = await staggered.staggered_race(
+ (functools.partial(self._connect_sock,
+ exceptions, addrinfo, laddr_infos)
+ for addrinfo in infos),
+ happy_eyeballs_delay, loop=self)
+
+ if sock is None:
+ exceptions = [exc for sub in exceptions for exc in sub]
if len(exceptions) == 1:
raise exceptions[0]
else:
@@ -1253,7 +1306,8 @@ async def create_datagram_endpoint(self, protocol_factory,
if local_addr:
sock.bind(local_address)
if remote_addr:
- await self.sock_connect(sock, remote_address)
+ if not allow_broadcast:
+ await self.sock_connect(sock, remote_address)
r_addr = remote_address
except OSError as exc:
if sock is not None:
diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py
index 163b868afeee36..9a923514db0993 100644
--- a/Lib/asyncio/events.py
+++ b/Lib/asyncio/events.py
@@ -298,7 +298,8 @@ async def create_connection(
*, ssl=None, family=0, proto=0,
flags=0, sock=None, local_addr=None,
server_hostname=None,
- ssl_handshake_timeout=None):
+ ssl_handshake_timeout=None,
+ happy_eyeballs_delay=None, interleave=None):
raise NotImplementedError
async def create_server(
diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py
index 93b6889509436c..29968214f8ed92 100644
--- a/Lib/asyncio/selector_events.py
+++ b/Lib/asyncio/selector_events.py
@@ -587,7 +587,10 @@ class _SelectorTransport(transports._FlowControlMixin,
def __init__(self, loop, sock, protocol, extra=None, server=None):
super().__init__(extra, loop)
self._extra['socket'] = sock
- self._extra['sockname'] = sock.getsockname()
+ try:
+ self._extra['sockname'] = sock.getsockname()
+ except OSError:
+ self._extra['sockname'] = None
if 'peername' not in self._extra:
try:
self._extra['peername'] = sock.getpeername()
@@ -976,9 +979,11 @@ def sendto(self, data, addr=None):
if not data:
return
- if self._address and addr not in (None, self._address):
- raise ValueError(
- f'Invalid address: must be None or {self._address}')
+ if self._address:
+ if addr not in (None, self._address):
+ raise ValueError(
+ f'Invalid address: must be None or {self._address}')
+ addr = self._address
if self._conn_lost and self._address:
if self._conn_lost >= constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:
@@ -989,7 +994,7 @@ def sendto(self, data, addr=None):
if not self._buffer:
# Attempt to send it right away first.
try:
- if self._address:
+ if self._extra['peername']:
self._sock.send(data)
else:
self._sock.sendto(data, addr)
@@ -1012,7 +1017,7 @@ def _sendto_ready(self):
while self._buffer:
data, addr = self._buffer.popleft()
try:
- if self._address:
+ if self._extra['peername']:
self._sock.send(data)
else:
self._sock.sendto(data, addr)
diff --git a/Lib/asyncio/staggered.py b/Lib/asyncio/staggered.py
new file mode 100644
index 00000000000000..feec681b4371bf
--- /dev/null
+++ b/Lib/asyncio/staggered.py
@@ -0,0 +1,147 @@
+"""Support for running coroutines in parallel with staggered start times."""
+
+__all__ = 'staggered_race',
+
+import contextlib
+import typing
+
+from . import events
+from . import futures
+from . import locks
+from . import tasks
+
+
+async def staggered_race(
+ coro_fns: typing.Iterable[typing.Callable[[], typing.Awaitable]],
+ delay: typing.Optional[float],
+ *,
+ loop: events.AbstractEventLoop = None,
+) -> typing.Tuple[
+ typing.Any,
+ typing.Optional[int],
+ typing.List[typing.Optional[Exception]]
+]:
+ """Run coroutines with staggered start times and take the first to finish.
+
+ This method takes an iterable of coroutine functions. The first one is
+ started immediately. From then on, whenever the immediately preceding one
+ fails (raises an exception), or when *delay* seconds have passed, the next
+ coroutine is started. This continues until one of the coroutines completes
+ successfully, in which case all others are cancelled, or until all
+ coroutines fail.
+
+ The coroutines provided should be well-behaved in the following way:
+
+ * They should only ``return`` if completed successfully.
+
+ * They should always raise an exception if they did not complete
+ successfully. In particular, if they handle cancellation, they should
+ probably reraise, like this::
+
+ try:
+ # do work
+ except asyncio.CancelledError:
+ # undo partially completed work
+ raise
+
+ Args:
+ coro_fns: an iterable of coroutine functions, i.e. callables that
+ return a coroutine object when called. Use ``functools.partial`` or
+ lambdas to pass arguments.
+
+ delay: amount of time, in seconds, between starting coroutines. If
+ ``None``, the coroutines will run sequentially.
+
+ loop: the event loop to use.
+
+ Returns:
+ tuple *(winner_result, winner_index, exceptions)* where
+
+ - *winner_result*: the result of the winning coroutine, or ``None``
+ if no coroutines won.
+
+ - *winner_index*: the index of the winning coroutine in
+ ``coro_fns``, or ``None`` if no coroutines won. If the winning
+ coroutine may return None on success, *winner_index* can be used
+ to definitively determine whether any coroutine won.
+
+ - *exceptions*: list of exceptions returned by the coroutines.
+ ``len(exceptions)`` is equal to the number of coroutines actually
+ started, and the order is the same as in ``coro_fns``. The winning
+ coroutine's entry is ``None``.
+
+ """
+ # TODO: when we have aiter() and anext(), allow async iterables in coro_fns.
+ loop = loop or events.get_running_loop()
+ enum_coro_fns = enumerate(coro_fns)
+ winner_result = None
+ winner_index = None
+ exceptions = []
+ running_tasks = []
+
+ async def run_one_coro(
+ previous_failed: typing.Optional[locks.Event]) -> None:
+ # Wait for the previous task to finish, or for delay seconds
+ if previous_failed is not None:
+ with contextlib.suppress(futures.TimeoutError):
+ # Use asyncio.wait_for() instead of asyncio.wait() here, so
+ # that if we get cancelled at this point, Event.wait() is also
+ # cancelled, otherwise there will be a "Task destroyed but it is
+ # pending" later.
+ await tasks.wait_for(previous_failed.wait(), delay)
+ # Get the next coroutine to run
+ try:
+ this_index, coro_fn = next(enum_coro_fns)
+ except StopIteration:
+ return
+ # Start task that will run the next coroutine
+ this_failed = locks.Event()
+ next_task = loop.create_task(run_one_coro(this_failed))
+ running_tasks.append(next_task)
+ assert len(running_tasks) == this_index + 2
+ # Prepare place to put this coroutine's exceptions if not won
+ exceptions.append(None)
+ assert len(exceptions) == this_index + 1
+
+ try:
+ result = await coro_fn()
+ except Exception as e:
+ exceptions[this_index] = e
+ this_failed.set() # Kickstart the next coroutine
+ else:
+ # Store winner's results
+ nonlocal winner_index, winner_result
+ assert winner_index is None
+ winner_index = this_index
+ winner_result = result
+ # Cancel all other tasks. We take care to not cancel the current
+ # task as well. If we do so, then since there is no `await` after
+ # here and CancelledError are usually thrown at once, we will
+ # encounter a curious corner case where the current task will end
+ # up as done() == True, cancelled() == False, exception() ==
+ # asyncio.CancelledError. This behavior is specified in
+ # https://bugs.python.org/issue30048
+ for i, t in enumerate(running_tasks):
+ if i != this_index:
+ t.cancel()
+
+ first_task = loop.create_task(run_one_coro(None))
+ running_tasks.append(first_task)
+ try:
+ # Wait for a growing list of tasks to all finish: poor man's version of
+ # curio's TaskGroup or trio's nursery
+ done_count = 0
+ while done_count != len(running_tasks):
+ done, _ = await tasks.wait(running_tasks)
+ done_count = len(done)
+ # If run_one_coro raises an unhandled exception, it's probably a
+ # programming error, and I want to see it.
+ if __debug__:
+ for d in done:
+ if d.done() and not d.cancelled() and d.exception():
+ raise d.exception()
+ return winner_result, winner_index, exceptions
+ finally:
+ # Make sure no tasks are left running if we leave this function
+ for t in running_tasks:
+ t.cancel()
diff --git a/Lib/asyncio/streams.py b/Lib/asyncio/streams.py
index 33fc303a6ffcfc..79adf028212f87 100644
--- a/Lib/asyncio/streams.py
+++ b/Lib/asyncio/streams.py
@@ -4,6 +4,7 @@
import socket
import sys
+import warnings
import weakref
if hasattr(socket, 'AF_UNIX'):
@@ -42,11 +43,14 @@ async def open_connection(host=None, port=None, *,
"""
if loop is None:
loop = events.get_event_loop()
- reader = StreamReader(limit=limit, loop=loop)
- protocol = StreamReaderProtocol(reader, loop=loop)
+ reader = StreamReader(limit=limit, loop=loop,
+ _asyncio_internal=True)
+ protocol = StreamReaderProtocol(reader, loop=loop,
+ _asyncio_internal=True)
transport, _ = await loop.create_connection(
lambda: protocol, host, port, **kwds)
- writer = StreamWriter(transport, protocol, reader, loop)
+ writer = StreamWriter(transport, protocol, reader, loop,
+ _asyncio_internal=True)
return reader, writer
@@ -77,9 +81,11 @@ async def start_server(client_connected_cb, host=None, port=None, *,
loop = events.get_event_loop()
def factory():
- reader = StreamReader(limit=limit, loop=loop)
+ reader = StreamReader(limit=limit, loop=loop,
+ _asyncio_internal=True)
protocol = StreamReaderProtocol(reader, client_connected_cb,
- loop=loop)
+ loop=loop,
+ _asyncio_internal=True)
return protocol
return await loop.create_server(factory, host, port, **kwds)
@@ -93,11 +99,14 @@ async def open_unix_connection(path=None, *,
"""Similar to `open_connection` but works with UNIX Domain Sockets."""
if loop is None:
loop = events.get_event_loop()
- reader = StreamReader(limit=limit, loop=loop)
- protocol = StreamReaderProtocol(reader, loop=loop)
+ reader = StreamReader(limit=limit, loop=loop,
+ _asyncio_internal=True)
+ protocol = StreamReaderProtocol(reader, loop=loop,
+ _asyncio_internal=True)
transport, _ = await loop.create_unix_connection(
lambda: protocol, path, **kwds)
- writer = StreamWriter(transport, protocol, reader, loop)
+ writer = StreamWriter(transport, protocol, reader, loop,
+ _asyncio_internal=True)
return reader, writer
async def start_unix_server(client_connected_cb, path=None, *,
@@ -107,9 +116,11 @@ async def start_unix_server(client_connected_cb, path=None, *,
loop = events.get_event_loop()
def factory():
- reader = StreamReader(limit=limit, loop=loop)
+ reader = StreamReader(limit=limit, loop=loop,
+ _asyncio_internal=True)
protocol = StreamReaderProtocol(reader, client_connected_cb,
- loop=loop)
+ loop=loop,
+ _asyncio_internal=True)
return protocol
return await loop.create_unix_server(factory, path, **kwds)
@@ -125,11 +136,20 @@ class FlowControlMixin(protocols.Protocol):
StreamWriter.drain() must wait for _drain_helper() coroutine.
"""
- def __init__(self, loop=None):
+ def __init__(self, loop=None, *, _asyncio_internal=False):
if loop is None:
self._loop = events.get_event_loop()
else:
self._loop = loop
+ if not _asyncio_internal:
+ # NOTE:
+ # Avoid inheritance from FlowControlMixin
+ # Copy-paste the code to your project
+ # if you need flow control helpers
+ warnings.warn(f"{self.__class__} should be instantiated "
+ "by asyncio internals only, "
+ "please avoid its creation from user code",
+ DeprecationWarning)
self._paused = False
self._drain_waiter = None
self._connection_lost = False
@@ -179,6 +199,9 @@ async def _drain_helper(self):
self._drain_waiter = waiter
await waiter
+ def _get_close_waiter(self, stream):
+ raise NotImplementedError
+
class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
"""Helper class to adapt between Protocol and StreamReader.
@@ -191,8 +214,9 @@ class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
_source_traceback = None
- def __init__(self, stream_reader, client_connected_cb=None, loop=None):
- super().__init__(loop=loop)
+ def __init__(self, stream_reader, client_connected_cb=None, loop=None,
+ *, _asyncio_internal=False):
+ super().__init__(loop=loop, _asyncio_internal=_asyncio_internal)
if stream_reader is not None:
self._stream_reader_wr = weakref.ref(stream_reader,
self._on_reader_gc)
@@ -253,7 +277,8 @@ def connection_made(self, transport):
if self._client_connected_cb is not None:
self._stream_writer = StreamWriter(transport, self,
reader,
- self._loop)
+ self._loop,
+ _asyncio_internal=True)
res = self._client_connected_cb(reader,
self._stream_writer)
if coroutines.iscoroutine(res):
@@ -293,6 +318,9 @@ def eof_received(self):
return False
return True
+ def _get_close_waiter(self, stream):
+ return self._closed
+
def __del__(self):
# Prevent reports about unhandled exceptions.
# Better than self._closed._log_traceback = False hack
@@ -311,7 +339,13 @@ class StreamWriter:
directly.
"""
- def __init__(self, transport, protocol, reader, loop):
+ def __init__(self, transport, protocol, reader, loop,
+ *, _asyncio_internal=False):
+ if not _asyncio_internal:
+ warnings.warn(f"{self.__class__} should be instantiated "
+ "by asyncio internals only, "
+ "please avoid its creation from user code",
+ DeprecationWarning)
self._transport = transport
self._protocol = protocol
# drain() expects that the reader has an exception() method
@@ -348,7 +382,7 @@ def is_closing(self):
return self._transport.is_closing()
async def wait_closed(self):
- await self._protocol._closed
+ await self._protocol._get_close_waiter(self)
def get_extra_info(self, name, default=None):
return self._transport.get_extra_info(name, default)
@@ -366,13 +400,12 @@ async def drain(self):
if exc is not None:
raise exc
if self._transport.is_closing():
- # Yield to the event loop so connection_lost() may be
- # called. Without this, _drain_helper() would return
- # immediately, and code that calls
- # write(...); await drain()
- # in a loop would never call connection_lost(), so it
- # would not see an error when the socket is closed.
- await sleep(0, loop=self._loop)
+ # Wait for protocol.connection_lost() call
+ # Raise connection closing error if any,
+ # ConnectionResetError otherwise
+ fut = self._protocol._get_close_waiter(self)
+ await fut
+ raise ConnectionResetError('Connection lost')
await self._protocol._drain_helper()
async def aclose(self):
@@ -388,7 +421,14 @@ class StreamReader:
_source_traceback = None
- def __init__(self, limit=_DEFAULT_LIMIT, loop=None):
+ def __init__(self, limit=_DEFAULT_LIMIT, loop=None,
+ *, _asyncio_internal=False):
+ if not _asyncio_internal:
+ warnings.warn(f"{self.__class__} should be instantiated "
+ "by asyncio internals only, "
+ "please avoid its creation from user code",
+ DeprecationWarning)
+
# The line length limit is a security feature;
# it also doubles as half the buffer limit.
diff --git a/Lib/asyncio/subprocess.py b/Lib/asyncio/subprocess.py
index 90fc00de8339fb..d34b6118fdcf72 100644
--- a/Lib/asyncio/subprocess.py
+++ b/Lib/asyncio/subprocess.py
@@ -1,6 +1,7 @@
__all__ = 'create_subprocess_exec', 'create_subprocess_shell'
import subprocess
+import warnings
from . import events
from . import protocols
@@ -18,13 +19,14 @@ class SubprocessStreamProtocol(streams.FlowControlMixin,
protocols.SubprocessProtocol):
"""Like StreamReaderProtocol, but for a subprocess."""
- def __init__(self, limit, loop):
- super().__init__(loop=loop)
+ def __init__(self, limit, loop, *, _asyncio_internal=False):
+ super().__init__(loop=loop, _asyncio_internal=_asyncio_internal)
self._limit = limit
self.stdin = self.stdout = self.stderr = None
self._transport = None
self._process_exited = False
self._pipe_fds = []
+ self._stdin_closed = self._loop.create_future()
def __repr__(self):
info = [self.__class__.__name__]
@@ -42,14 +44,16 @@ def connection_made(self, transport):
stdout_transport = transport.get_pipe_transport(1)
if stdout_transport is not None:
self.stdout = streams.StreamReader(limit=self._limit,
- loop=self._loop)
+ loop=self._loop,
+ _asyncio_internal=True)
self.stdout.set_transport(stdout_transport)
self._pipe_fds.append(1)
stderr_transport = transport.get_pipe_transport(2)
if stderr_transport is not None:
self.stderr = streams.StreamReader(limit=self._limit,
- loop=self._loop)
+ loop=self._loop,
+ _asyncio_internal=True)
self.stderr.set_transport(stderr_transport)
self._pipe_fds.append(2)
@@ -58,7 +62,8 @@ def connection_made(self, transport):
self.stdin = streams.StreamWriter(stdin_transport,
protocol=self,
reader=None,
- loop=self._loop)
+ loop=self._loop,
+ _asyncio_internal=True)
def pipe_data_received(self, fd, data):
if fd == 1:
@@ -76,6 +81,10 @@ def pipe_connection_lost(self, fd, exc):
if pipe is not None:
pipe.close()
self.connection_lost(exc)
+ if exc is None:
+ self._stdin_closed.set_result(None)
+ else:
+ self._stdin_closed.set_exception(exc)
return
if fd == 1:
reader = self.stdout
@@ -102,9 +111,19 @@ def _maybe_close_transport(self):
self._transport.close()
self._transport = None
+ def _get_close_waiter(self, stream):
+ if stream is self.stdin:
+ return self._stdin_closed
+
class Process:
- def __init__(self, transport, protocol, loop):
+ def __init__(self, transport, protocol, loop, *, _asyncio_internal=False):
+ if not _asyncio_internal:
+ warnings.warn(f"{self.__class__} should be instantiated "
+ "by asyncio internals only, "
+ "please avoid its creation from user code",
+ DeprecationWarning)
+
self._transport = transport
self._protocol = protocol
self._loop = loop
@@ -195,12 +214,13 @@ async def create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None,
if loop is None:
loop = events.get_event_loop()
protocol_factory = lambda: SubprocessStreamProtocol(limit=limit,
- loop=loop)
+ loop=loop,
+ _asyncio_internal=True)
transport, protocol = await loop.subprocess_shell(
protocol_factory,
cmd, stdin=stdin, stdout=stdout,
stderr=stderr, **kwds)
- return Process(transport, protocol, loop)
+ return Process(transport, protocol, loop, _asyncio_internal=True)
async def create_subprocess_exec(program, *args, stdin=None, stdout=None,
@@ -209,10 +229,11 @@ async def create_subprocess_exec(program, *args, stdin=None, stdout=None,
if loop is None:
loop = events.get_event_loop()
protocol_factory = lambda: SubprocessStreamProtocol(limit=limit,
- loop=loop)
+ loop=loop,
+ _asyncio_internal=True)
transport, protocol = await loop.subprocess_exec(
protocol_factory,
program, *args,
stdin=stdin, stdout=stdout,
stderr=stderr, **kwds)
- return Process(transport, protocol, loop)
+ return Process(transport, protocol, loop, _asyncio_internal=True)
diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py
index d8508376d92a48..211b9126b0114a 100644
--- a/Lib/asyncio/tasks.py
+++ b/Lib/asyncio/tasks.py
@@ -495,10 +495,11 @@ def _on_completion(f):
finally:
if timeout_handle is not None:
timeout_handle.cancel()
+ for f in fs:
+ f.remove_done_callback(_on_completion)
done, pending = set(), set()
for f in fs:
- f.remove_done_callback(_on_completion)
if f.done():
done.add(f)
else:
@@ -627,7 +628,8 @@ def ensure_future(coro_or_future, *, loop=None):
return task
elif futures.isfuture(coro_or_future):
if loop is not None and loop is not futures._get_loop(coro_or_future):
- raise ValueError('loop argument must agree with Future')
+ raise ValueError('The future belongs to a different loop than '
+ 'the one specified as the loop argument')
return coro_or_future
elif inspect.isawaitable(coro_or_future):
return ensure_future(_wrap_awaitable(coro_or_future), loop=loop)
@@ -816,7 +818,7 @@ def shield(arg, *, loop=None):
loop = futures._get_loop(inner)
outer = loop.create_future()
- def _done_callback(inner):
+ def _inner_done_callback(inner):
if outer.cancelled():
if not inner.cancelled():
# Mark inner's result as retrieved.
@@ -832,7 +834,13 @@ def _done_callback(inner):
else:
outer.set_result(inner.result())
- inner.add_done_callback(_done_callback)
+
+ def _outer_done_callback(outer):
+ if not inner.done():
+ inner.remove_done_callback(_inner_done_callback)
+
+ inner.add_done_callback(_inner_done_callback)
+ outer.add_done_callback(_outer_done_callback)
return outer
diff --git a/Lib/asyncore.py b/Lib/asyncore.py
index 828f4d4fe7897b..0e92be3ad1912c 100644
--- a/Lib/asyncore.py
+++ b/Lib/asyncore.py
@@ -262,8 +262,6 @@ def __repr__(self):
status.append(repr(self.addr))
return '<%s at %#x>' % (' '.join(status), id(self))
- __str__ = __repr__
-
def add_channel(self, map=None):
#self.log_info('adding channel %s' % self)
if map is None:
diff --git a/Lib/bdb.py b/Lib/bdb.py
index 54aa98437450a2..69174364c46aef 100644
--- a/Lib/bdb.py
+++ b/Lib/bdb.py
@@ -649,6 +649,7 @@ def runcall(*args, **kwds):
self.quitting = True
sys.settrace(None)
return res
+ runcall.__text_signature__ = '($self, func, /, *args, **kwds)'
def set_trace():
diff --git a/Lib/cProfile.py b/Lib/cProfile.py
index 2e449cc576cebd..369d02e22e24aa 100755
--- a/Lib/cProfile.py
+++ b/Lib/cProfile.py
@@ -124,6 +124,7 @@ def runcall(*args, **kw):
return func(*args, **kw)
finally:
self.disable()
+ runcall.__text_signature__ = '($self, func, /, *args, **kw)'
def __enter__(self):
self.enable()
diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py
index 9657c1cf83bc58..706907ad4a282d 100644
--- a/Lib/collections/__init__.py
+++ b/Lib/collections/__init__.py
@@ -1018,6 +1018,8 @@ def __init__(*args, **kwargs):
self.update(dict)
if kwargs:
self.update(kwargs)
+ __init__.__text_signature__ = '($self, dict=None, /, **kwargs)'
+
def __len__(self): return len(self.data)
def __getitem__(self, key):
if key in self.data:
@@ -1083,7 +1085,11 @@ def __cast(self, other):
return other.data if isinstance(other, UserList) else other
def __contains__(self, item): return item in self.data
def __len__(self): return len(self.data)
- def __getitem__(self, i): return self.data[i]
+ def __getitem__(self, i):
+ if isinstance(i, slice):
+ return self.__class__(self.data[i])
+ else:
+ return self.data[i]
def __setitem__(self, i, item): self.data[i] = item
def __delitem__(self, i): del self.data[i]
def __add__(self, other):
diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py
index ea16eef841c518..8f155f0ea82bdc 100644
--- a/Lib/concurrent/futures/_base.py
+++ b/Lib/concurrent/futures/_base.py
@@ -567,6 +567,7 @@ def submit(*args, **kwargs):
'got %d' % (len(args)-1))
raise NotImplementedError()
+ submit.__text_signature__ = '($self, fn, /, *args, **kwargs)'
def map(self, fn, *iterables, timeout=None, chunksize=1):
"""Returns an iterator equivalent to map(fn, iter).
diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py
index e6ce278b5d44c6..d7e2478d9227b5 100644
--- a/Lib/concurrent/futures/process.py
+++ b/Lib/concurrent/futures/process.py
@@ -51,7 +51,7 @@
import queue
from queue import Full
import multiprocessing as mp
-from multiprocessing.connection import wait
+import multiprocessing.connection
from multiprocessing.queues import Queue
import threading
import weakref
@@ -352,7 +352,7 @@ def shutdown_worker():
# submitted, from the executor being shutdown/gc-ed, or from the
# shutdown of the python interpreter.
worker_sentinels = [p.sentinel for p in processes.values()]
- ready = wait(readers + worker_sentinels)
+ ready = mp.connection.wait(readers + worker_sentinels)
cause = None
is_broken = True
@@ -630,6 +630,7 @@ def submit(*args, **kwargs):
self._start_queue_management_thread()
return f
+ submit.__text_signature__ = _base.Executor.submit.__text_signature__
submit.__doc__ = _base.Executor.submit.__doc__
def map(self, fn, *iterables, timeout=None, chunksize=1):
diff --git a/Lib/concurrent/futures/thread.py b/Lib/concurrent/futures/thread.py
index 0a61e3a9ac1bd1..2af31a106dd914 100644
--- a/Lib/concurrent/futures/thread.py
+++ b/Lib/concurrent/futures/thread.py
@@ -174,6 +174,7 @@ def submit(*args, **kwargs):
self._work_queue.put(w)
self._adjust_thread_count()
return f
+ submit.__text_signature__ = _base.Executor.submit.__text_signature__
submit.__doc__ = _base.Executor.submit.__doc__
def _adjust_thread_count(self):
diff --git a/Lib/contextlib.py b/Lib/contextlib.py
index ae498a2b6ef5e2..de989a001c6dfb 100644
--- a/Lib/contextlib.py
+++ b/Lib/contextlib.py
@@ -454,6 +454,7 @@ def callback(*args, **kwds):
_exit_wrapper.__wrapped__ = callback
self._push_exit_callback(_exit_wrapper)
return callback # Allow use as a decorator
+ callback.__text_signature__ = '($self, callback, /, *args, **kwds)'
def _push_cm_exit(self, cm, cm_exit):
"""Helper to correctly register callbacks to __exit__ methods."""
@@ -615,6 +616,7 @@ def push_async_callback(*args, **kwds):
_exit_wrapper.__wrapped__ = callback
self._push_exit_callback(_exit_wrapper, False)
return callback # Allow use as a decorator
+ push_async_callback.__text_signature__ = '($self, callback, /, *args, **kwds)'
async def aclose(self):
"""Immediately unwind the context stack."""
diff --git a/Lib/ctypes/test/test_values.py b/Lib/ctypes/test/test_values.py
index b38b63f870a653..87eb9198ade0c7 100644
--- a/Lib/ctypes/test/test_values.py
+++ b/Lib/ctypes/test/test_values.py
@@ -80,9 +80,9 @@ class struct_frozen(Structure):
continue
items.append((entry.name.decode("ascii"), entry.size))
- expected = [("__hello__", 139),
- ("__phello__", -139),
- ("__phello__.spam", 139),
+ expected = [("__hello__", 141),
+ ("__phello__", -141),
+ ("__phello__.spam", 141),
]
self.assertEqual(items, expected, "PyImport_FrozenModules example "
"in Doc/library/ctypes.rst may be out of date")
diff --git a/Lib/curses/__init__.py b/Lib/curses/__init__.py
index 44a198428820f7..24ff3ca93a8933 100644
--- a/Lib/curses/__init__.py
+++ b/Lib/curses/__init__.py
@@ -110,3 +110,4 @@ def wrapper(*args, **kwds):
echo()
nocbreak()
endwin()
+wrapper.__text_signature__ = '(func, /, *args, **kwds)'
diff --git a/Lib/datetime.py b/Lib/datetime.py
index 85bfa48e05dea4..0e64815944dbd7 100644
--- a/Lib/datetime.py
+++ b/Lib/datetime.py
@@ -884,6 +884,40 @@ def fromisoformat(cls, date_string):
except Exception:
raise ValueError(f'Invalid isoformat string: {date_string!r}')
+ @classmethod
+ def fromisocalendar(cls, year, week, day):
+ """Construct a date from the ISO year, week number and weekday.
+
+ This is the inverse of the date.isocalendar() function"""
+ # Year is bounded this way because 9999-12-31 is (9999, 52, 5)
+ if not MINYEAR <= year <= MAXYEAR:
+ raise ValueError(f"Year is out of range: {year}")
+
+ if not 0 < week < 53:
+ out_of_range = True
+
+ if week == 53:
+ # ISO years have 53 weeks in them on years starting with a
+ # Thursday and leap years starting on a Wednesday
+ first_weekday = _ymd2ord(year, 1, 1) % 7
+ if (first_weekday == 4 or (first_weekday == 3 and
+ _is_leap(year))):
+ out_of_range = False
+
+ if out_of_range:
+ raise ValueError(f"Invalid week: {week}")
+
+ if not 0 < day < 8:
+ raise ValueError(f"Invalid weekday: {day} (range is [1, 7])")
+
+ # Now compute the offset from (Y, 1, 1) in days:
+ day_offset = (week - 1) * 7 + (day - 1)
+
+ # Calculate the ordinal day for monday, week 1
+ day_1 = _isoweek1monday(year)
+ ord_day = day_1 + day_offset
+
+ return cls(*_ord2ymd(ord_day))
# Conversions to string
@@ -2141,6 +2175,7 @@ def _isoweek1monday(year):
week1monday += 7
return week1monday
+
class timezone(tzinfo):
__slots__ = '_offset', '_name'
diff --git a/Lib/dbm/__init__.py b/Lib/dbm/__init__.py
index 6831a844073740..f65da521af4da8 100644
--- a/Lib/dbm/__init__.py
+++ b/Lib/dbm/__init__.py
@@ -82,7 +82,8 @@ def open(file, flag='r', mode=0o666):
# file doesn't exist and the new flag was used so use default type
mod = _defaultmod
else:
- raise error[0]("need 'c' or 'n' flag to open new db")
+ raise error[0]("db file doesn't exist; "
+ "use 'c' or 'n' flag to create a new db")
elif result == "":
# db type cannot be determined
raise error[0]("db type could not be determined")
diff --git a/Lib/dis.py b/Lib/dis.py
index b2b0003203a44f..a25fb2b417643d 100644
--- a/Lib/dis.py
+++ b/Lib/dis.py
@@ -157,6 +157,7 @@ def _format_code_info(co):
lines.append("Name: %s" % co.co_name)
lines.append("Filename: %s" % co.co_filename)
lines.append("Argument count: %s" % co.co_argcount)
+ lines.append("Positional-only arguments: %s" % co.co_posonlyargcount)
lines.append("Kw-only arguments: %s" % co.co_kwonlyargcount)
lines.append("Number of locals: %s" % co.co_nlocals)
lines.append("Stack size: %s" % co.co_stacksize)
diff --git a/Lib/distutils/_msvccompiler.py b/Lib/distutils/_msvccompiler.py
index 58b20a21024733..c7ac3f049ebf22 100644
--- a/Lib/distutils/_msvccompiler.py
+++ b/Lib/distutils/_msvccompiler.py
@@ -89,13 +89,24 @@ def _find_vc2017():
return None, None
+PLAT_SPEC_TO_RUNTIME = {
+ 'x86' : 'x86',
+ 'x86_amd64' : 'x64',
+ 'x86_arm' : 'arm',
+}
+
def _find_vcvarsall(plat_spec):
_, best_dir = _find_vc2017()
vcruntime = None
- vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86'
+
+ if plat_spec in PLAT_SPEC_TO_RUNTIME:
+ vcruntime_plat = PLAT_SPEC_TO_RUNTIME[plat_spec]
+ else:
+ vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86'
+
if best_dir:
vcredist = os.path.join(best_dir, "..", "..", "redist", "MSVC", "**",
- "Microsoft.VC141.CRT", "vcruntime140.dll")
+ vcruntime_plat, "Microsoft.VC141.CRT", "vcruntime140.dll")
try:
import glob
vcruntime = glob.glob(vcredist, recursive=True)[-1]
@@ -178,6 +189,7 @@ def _find_exe(exe, paths=None):
PLAT_TO_VCVARS = {
'win32' : 'x86',
'win-amd64' : 'x86_amd64',
+ 'win-arm32' : 'x86_arm',
}
# A set containing the DLLs that are guaranteed to be available for
diff --git a/Lib/distutils/command/build.py b/Lib/distutils/command/build.py
index c6f52e61e1bc70..a86df0bc7f9218 100644
--- a/Lib/distutils/command/build.py
+++ b/Lib/distutils/command/build.py
@@ -116,7 +116,7 @@ def finalize_options(self):
self.build_scripts = os.path.join(self.build_base,
'scripts-%d.%d' % sys.version_info[:2])
- if self.executable is None:
+ if self.executable is None and sys.executable:
self.executable = os.path.normpath(sys.executable)
if isinstance(self.parallel, str):
diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py
index 0428466b00c902..c3b9602461f93a 100644
--- a/Lib/distutils/command/build_ext.py
+++ b/Lib/distutils/command/build_ext.py
@@ -714,20 +714,20 @@ def get_libraries(self, ext):
# don't extend ext.libraries, it may be shared with other
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib]
- else:
- return ext.libraries
- elif sys.platform == 'darwin':
- # Don't use the default code below
- return ext.libraries
- elif sys.platform[:3] == 'aix':
- # Don't use the default code below
- return ext.libraries
+ # On Android only the main executable and LD_PRELOADs are considered
+ # to be RTLD_GLOBAL, all the dependencies of the main executable
+ # remain RTLD_LOCAL and so the shared libraries must be linked with
+ # libpython when python is built with a shared python library (issue
+ # bpo-21536).
else:
- from distutils import sysconfig
- if sysconfig.get_config_var('Py_ENABLE_SHARED'):
- pythonlib = 'python{}.{}{}'.format(
- sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff,
- sysconfig.get_config_var('ABIFLAGS'))
- return ext.libraries + [pythonlib]
- else:
- return ext.libraries
+ from distutils.sysconfig import get_config_var
+ if get_config_var('Py_ENABLE_SHARED'):
+ # Either a native build on an Android device or the
+ # cross-compilation of Python.
+ if (hasattr(sys, 'getandroidapilevel') or
+ ('_PYTHON_HOST_PLATFORM' in os.environ and
+ get_config_var('ANDROID_API_LEVEL') != 0)):
+ ldversion = get_config_var('LDVERSION')
+ return ext.libraries + ['python' + ldversion]
+
+ return ext.libraries
diff --git a/Lib/distutils/spawn.py b/Lib/distutils/spawn.py
index 53876880932795..ceb94945dc8bed 100644
--- a/Lib/distutils/spawn.py
+++ b/Lib/distutils/spawn.py
@@ -81,7 +81,6 @@ def _spawn_nt(cmd, search_path=1, verbose=0, dry_run=0):
"command %r failed with exit status %d" % (cmd, rc))
if sys.platform == 'darwin':
- from distutils import sysconfig
_cfg_target = None
_cfg_target_split = None
@@ -95,6 +94,7 @@ def _spawn_posix(cmd, search_path=1, verbose=0, dry_run=0):
if sys.platform == 'darwin':
global _cfg_target, _cfg_target_split
if _cfg_target is None:
+ from distutils import sysconfig
_cfg_target = sysconfig.get_config_var(
'MACOSX_DEPLOYMENT_TARGET') or ''
if _cfg_target:
@@ -172,21 +172,32 @@ def find_executable(executable, path=None):
A string listing directories separated by 'os.pathsep'; defaults to
os.environ['PATH']. Returns the complete filename or None if not found.
"""
- if path is None:
- path = os.environ.get('PATH', os.defpath)
-
- paths = path.split(os.pathsep)
- base, ext = os.path.splitext(executable)
-
+ _, ext = os.path.splitext(executable)
if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
- if not os.path.isfile(executable):
- for p in paths:
- f = os.path.join(p, executable)
- if os.path.isfile(f):
- # the file exists, we have a shot at spawn working
- return f
- return None
- else:
+ if os.path.isfile(executable):
return executable
+
+ if path is None:
+ path = os.environ.get('PATH', None)
+ if path is None:
+ try:
+ path = os.confstr("CS_PATH")
+ except (AttributeError, ValueError):
+ # os.confstr() or CS_PATH is not available
+ path = os.defpath
+ # bpo-35755: Don't use os.defpath if the PATH environment variable is
+ # set to an empty string
+
+ # PATH='' doesn't match, whereas PATH=':' looks in the current directory
+ if not path:
+ return None
+
+ paths = path.split(os.pathsep)
+ for p in paths:
+ f = os.path.join(p, executable)
+ if os.path.isfile(f):
+ # the file exists, we have a shot at spawn working
+ return f
+ return None
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index a3494670db18c1..b51629eb94f825 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -15,6 +15,7 @@
import sys
from .errors import DistutilsPlatformError
+from .util import get_platform, get_host_platform
# These are needed in a couple of spots, so just compute them once.
PREFIX = os.path.normpath(sys.prefix)
@@ -28,7 +29,12 @@
if "_PYTHON_PROJECT_BASE" in os.environ:
project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"])
else:
- project_base = os.path.dirname(os.path.abspath(sys.executable))
+ if sys.executable:
+ project_base = os.path.dirname(os.path.abspath(sys.executable))
+ else:
+ # sys.executable can be empty if argv[0] has been changed and Python is
+ # unable to retrieve the real program name
+ project_base = os.getcwd()
# python_build: (Boolean) if true, we're either building Python or
diff --git a/Lib/distutils/tests/test_spawn.py b/Lib/distutils/tests/test_spawn.py
index 0d455385d8ace9..f9ae69ef86b3da 100644
--- a/Lib/distutils/tests/test_spawn.py
+++ b/Lib/distutils/tests/test_spawn.py
@@ -87,11 +87,52 @@ def test_find_executable(self):
rv = find_executable(dont_exist_program , path=tmp_dir)
self.assertIsNone(rv)
- # test os.defpath: missing PATH environment variable
+ # PATH='': no match, except in the current directory
with test_support.EnvironmentVarGuard() as env:
- with mock.patch('distutils.spawn.os.defpath', tmp_dir):
- env.pop('PATH')
+ env['PATH'] = ''
+ with unittest.mock.patch('distutils.spawn.os.confstr',
+ return_value=tmp_dir, create=True), \
+ unittest.mock.patch('distutils.spawn.os.defpath',
+ tmp_dir):
+ rv = find_executable(program)
+ self.assertIsNone(rv)
+
+ # look in current directory
+ with test_support.change_cwd(tmp_dir):
+ rv = find_executable(program)
+ self.assertEqual(rv, program)
+
+ # PATH=':': explicitly looks in the current directory
+ with test_support.EnvironmentVarGuard() as env:
+ env['PATH'] = os.pathsep
+ with unittest.mock.patch('distutils.spawn.os.confstr',
+ return_value='', create=True), \
+ unittest.mock.patch('distutils.spawn.os.defpath', ''):
+ rv = find_executable(program)
+ self.assertIsNone(rv)
+
+ # look in current directory
+ with test_support.change_cwd(tmp_dir):
+ rv = find_executable(program)
+ self.assertEqual(rv, program)
+
+ # missing PATH: test os.confstr("CS_PATH") and os.defpath
+ with test_support.EnvironmentVarGuard() as env:
+ env.pop('PATH', None)
+
+ # without confstr
+ with unittest.mock.patch('distutils.spawn.os.confstr',
+ side_effect=ValueError,
+ create=True), \
+ unittest.mock.patch('distutils.spawn.os.defpath',
+ tmp_dir):
+ rv = find_executable(program)
+ self.assertEqual(rv, filename)
+ # with confstr
+ with unittest.mock.patch('distutils.spawn.os.confstr',
+ return_value=tmp_dir, create=True), \
+ unittest.mock.patch('distutils.spawn.os.defpath', ''):
rv = find_executable(program)
self.assertEqual(rv, filename)
diff --git a/Lib/distutils/util.py b/Lib/distutils/util.py
index 15cd2ad9a9afb8..50550e1893418c 100644
--- a/Lib/distutils/util.py
+++ b/Lib/distutils/util.py
@@ -15,7 +15,7 @@
from distutils import log
from distutils.errors import DistutilsByteCompileError
-def get_platform ():
+def get_host_platform():
"""Return a string that identifies the current platform. This is used mainly to
distinguish platform-specific build directories and platform-specific built
distributions. Typically includes the OS name and version and the
@@ -38,6 +38,8 @@ def get_platform ():
if os.name == 'nt':
if 'amd64' in sys.version.lower():
return 'win-amd64'
+ if '(arm)' in sys.version.lower():
+ return 'win-arm32'
return sys.platform
# Set for cross builds explicitly
@@ -90,8 +92,16 @@ def get_platform ():
return "%s-%s-%s" % (osname, release, machine)
-# get_platform ()
-
+def get_platform():
+ if os.name == 'nt':
+ TARGET_TO_PLAT = {
+ 'x86' : 'win32',
+ 'x64' : 'win-amd64',
+ 'arm' : 'win-arm32',
+ }
+ return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform()
+ else:
+ return get_host_platform()
def convert_path (pathname):
"""Return 'pathname' as a name that will work on the native filesystem,
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 79d91a040c2eee..bf4889f59e0da4 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -2300,7 +2300,7 @@ def __repr__(self):
name = self._dt_test.name.split('.')
return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
- __str__ = __repr__
+ __str__ = object.__str__
def shortDescription(self):
return "Doctest: " + self._dt_test.name
@@ -2399,7 +2399,6 @@ def id(self):
def __repr__(self):
return self._dt_test.filename
- __str__ = __repr__
def format_failure(self, err):
return ('Failed doctest test for %s\n File "%s", line 0\n\n%s'
diff --git a/Lib/email/charset.py b/Lib/email/charset.py
index ee564040c68f8b..d3d759ad9115f0 100644
--- a/Lib/email/charset.py
+++ b/Lib/email/charset.py
@@ -241,11 +241,9 @@ def __init__(self, input_charset=DEFAULT_CHARSET):
self.output_codec = CODEC_MAP.get(self.output_charset,
self.output_charset)
- def __str__(self):
+ def __repr__(self):
return self.input_charset.lower()
- __repr__ = __str__
-
def __eq__(self, other):
return str(self) == str(other).lower()
diff --git a/Lib/fileinput.py b/Lib/fileinput.py
index 4a71cc5ff31872..0764aa5e4d2480 100644
--- a/Lib/fileinput.py
+++ b/Lib/fileinput.py
@@ -222,6 +222,7 @@ def __init__(self, files=None, inplace=False, backup="", bufsize=0,
warnings.warn("'U' mode is deprecated",
DeprecationWarning, 2)
self._mode = mode
+ self._write_mode = mode.replace('r', 'w') if 'U' not in mode else 'w'
if openhook:
if inplace:
raise ValueError("FileInput cannot use an opening hook in inplace mode")
@@ -348,14 +349,14 @@ def _readline(self):
try:
perm = os.fstat(self._file.fileno()).st_mode
except OSError:
- self._output = open(self._filename, "w")
+ self._output = open(self._filename, self._write_mode)
else:
mode = os.O_CREAT | os.O_WRONLY | os.O_TRUNC
if hasattr(os, 'O_BINARY'):
mode |= os.O_BINARY
fd = os.open(self._filename, mode, perm)
- self._output = os.fdopen(fd, "w")
+ self._output = os.fdopen(fd, self._write_mode)
try:
os.chmod(self._filename, perm)
except OSError:
diff --git a/Lib/functools.py b/Lib/functools.py
index 1f1874db9b4cce..28d9f6f75fdb8b 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -388,6 +388,7 @@ def __init__(*args, **keywords):
self.func = func
self.args = args
self.keywords = keywords
+ __init__.__text_signature__ = '($self, func, /, *args, **keywords)'
def __repr__(self):
args = ", ".join(map(repr, self.args))
diff --git a/Lib/gzip.py b/Lib/gzip.py
index 948fec293e23d9..7c861874198842 100644
--- a/Lib/gzip.py
+++ b/Lib/gzip.py
@@ -283,7 +283,7 @@ def read(self, size=-1):
def read1(self, size=-1):
"""Implements BufferedIOBase.read1()
- Reads up to a buffer's worth of data is size is negative."""
+ Reads up to a buffer's worth of data if size is negative."""
self._check_not_closed()
if self.mode != READ:
import errno
diff --git a/Lib/http/client.py b/Lib/http/client.py
index 5a2225276b1acd..82908ebe3afd65 100644
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -137,6 +137,16 @@
_is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch
_is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search
+# These characters are not allowed within HTTP URL paths.
+# See https://tools.ietf.org/html/rfc3986#section-3.3 and the
+# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition.
+# Prevents CVE-2019-9740. Includes control characters such as \r\n.
+# We don't restrict chars above \x7f as putrequest() limits us to ASCII.
+_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]')
+# Arguably only these _should_ allowed:
+# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
+# We are more lenient for assumed real world compatibility purposes.
+
# We always set the Content-Length header for these methods because some
# servers will otherwise respond with a 411
_METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
@@ -1079,6 +1089,10 @@ def putrequest(self, method, url, skip_host=False,
self._method = method
if not url:
url = '/'
+ # Prevent CVE-2019-9740.
+ if match := _contains_disallowed_url_pchar_re.search(url):
+ raise InvalidURL(f"URL can't contain control characters. {url!r} "
+ f"(found at least {match.group()!r})")
request = '%s %s %s' % (method, url, self._http_vsn_str)
# Non-ASCII characters should have been eliminated earlier
@@ -1405,8 +1419,7 @@ def __repr__(self):
e = ''
return '%s(%i bytes read%s)' % (self.__class__.__name__,
len(self.partial), e)
- def __str__(self):
- return repr(self)
+ __str__ = object.__str__
class ImproperConnectionState(HTTPException):
pass
diff --git a/Lib/idlelib/configdialog.py b/Lib/idlelib/configdialog.py
index 31520a3b0d1e36..4aaec1321f7d68 100644
--- a/Lib/idlelib/configdialog.py
+++ b/Lib/idlelib/configdialog.py
@@ -2225,7 +2225,7 @@ def detach(self):
'General': '''
General:
-AutoComplete: Popupwait is milleseconds to wait after key char, without
+AutoComplete: Popupwait is milliseconds to wait after key char, without
cursor movement, before popping up completion box. Key char is '.' after
identifier or a '/' (or '\\' on Windows) within a string.
diff --git a/Lib/idlelib/help.html b/Lib/idlelib/help.html
index b654ab7f2c5f86..ba44331e87b223 100644
--- a/Lib/idlelib/help.html
+++ b/Lib/idlelib/help.html
@@ -6,7 +6,7 @@
- IDLE — Python 3.8.0a1 documentation
+ IDLE — Python 3.8.0a3 documentation
@@ -19,7 +19,7 @@
@@ -72,7 +72,7 @@