variables:
testRunTitle: '$(build.sourceBranchName)-linux'
testRunPlatform: linux
- openssl_version: 1.1.1d
+ openssl_version: 1.1.1g
steps:
- template: ./posix-steps.yml
variables:
testRunTitle: '$(Build.SourceBranchName)-linux-coverage'
testRunPlatform: linux-coverage
- openssl_version: 1.1.1d
+ openssl_version: 1.1.1g
steps:
- template: ./posix-steps.yml
variables:
testRunTitle: '$(system.pullRequest.TargetBranch)-linux'
testRunPlatform: linux
- openssl_version: 1.1.1d
+ openssl_version: 1.1.1g
steps:
- template: ./posix-steps.yml
variables:
testRunTitle: '$(Build.SourceBranchName)-linux-coverage'
testRunPlatform: linux-coverage
- openssl_version: 1.1.1d
+ openssl_version: 1.1.1g
steps:
- template: ./posix-steps.yml
venv:
$(PYTHON) -m venv $(VENVDIR)
- $(VENVDIR)/bin/python3 -m pip install -U Sphinx blurb
+ $(VENVDIR)/bin/python3 -m pip install -U Sphinx==2.3.1 blurb
@echo "The venv has been created in the $(VENVDIR) directory"
dist:
.. c:function:: int PyErr_ResourceWarning(PyObject *source, Py_ssize_t stack_level, const char *format, ...)
Function similar to :c:func:`PyErr_WarnFormat`, but *category* is
- :exc:`ResourceWarning` and pass *source* to :func:`warnings.WarningMessage`.
+ :exc:`ResourceWarning` and it passes *source* to :func:`warnings.WarningMessage`.
.. versionadded:: 3.6
.. versionadded:: 3.4
+.. c:macro:: PyDoc_STRVAR(name, str)
+
+ Creates a variable with name ``name`` that can be used in docstrings.
+ If Python is built without docstrings, the value will be empty.
+
+ Use :c:macro:`PyDoc_STRVAR` for docstrings to support building
+ Python without docstrings, as specified in :pep:`7`.
+
+ Example::
+
+ PyDoc_STRVAR(pop_doc, "Remove and return the rightmost element.");
+
+ static PyMethodDef deque_methods[] = {
+ // ...
+ {"pop", (PyCFunction)deque_pop, METH_NOARGS, pop_doc},
+ // ...
+ };
+
+.. c:macro:: PyDoc_STR(str)
+
+ Creates a docstring for the given input string or an empty string
+ if docstrings are disabled.
+
+ Use :c:macro:`PyDoc_STR` in specifying docstrings to support
+ building Python without docstrings, as specified in :pep:`7`.
+
+ Example::
+
+ static PyMethodDef pysqlite_row_methods[] = {
+ {"keys", (PyCFunction)pysqlite_row_keys, METH_NOARGS,
+ PyDoc_STR("Returns the keys of the row.")},
+ {NULL, NULL}
+ };
.. _api-objects:
.. c:member:: const char *m_doc
Docstring for the module; usually a docstring variable created with
- :c:func:`PyDoc_STRVAR` is used.
+ :c:macro:`PyDoc_STRVAR` is used.
.. c:member:: Py_ssize_t m_size
.. versionadded:: 3.3
-.. c:function:: int PyObject_GenericSetDict(PyObject *o, void *context)
+.. c:function:: int PyObject_GenericSetDict(PyObject *o, PyObject *value, void *context)
A generic implementation for the setter of a ``__dict__`` descriptor. This
implementation does not allow the dictionary to be deleted.
PyObject_GenericSetAttr:PyObject*:value:+1:
PyObject_GenericSetDict:int:::
-PyObject_GenericSetDict:PyObject*:o:+1:
+PyObject_GenericSetDict:PyObject*:o:0:
+PyObject_GenericSetDict:PyObject*:value:+1:
PyObject_GenericSetDict:void*:context::
PyObject_GetAttr:PyObject*::+1:
Here the ``UpperOut`` class redefines the ``write()`` method to convert the
argument string to uppercase before calling the underlying
-``self.__outfile.write()`` method. All other methods are delegated to the
-underlying ``self.__outfile`` object. The delegation is accomplished via the
+``self._outfile.write()`` method. All other methods are delegated to the
+underlying ``self._outfile`` object. The delegation is accomplished via the
``__getattr__`` method; consult :ref:`the language reference <attribute-access>`
for more information about controlling attribute access.
In Python, you use ``socket.setblocking(0)`` to make it non-blocking. In C, it's
more complex, (for one thing, you'll need to choose between the BSD flavor
-``O_NONBLOCK`` and the almost indistinguishable Posix flavor ``O_NDELAY``, which
+``O_NONBLOCK`` and the almost indistinguishable POSIX flavor ``O_NDELAY``, which
is completely different from ``TCP_NODELAY``), but it's the exact same idea. You
do this after creating the socket, but before using it. (Actually, if you're
nuts, you can switch back and forth.)
python setup.py install --install-scripts=/usr/local/bin
-(This performs an installation using the "prefix scheme," where the prefix is
+(This performs an installation using the "prefix scheme", where the prefix is
whatever your Python interpreter was installed with--- :file:`/usr/local/python`
in this case.)
value=Name(id='data', ctx=Load()),
slice=Index(value=Str(s=node.id)),
ctx=node.ctx
- ), node)
+ )
Keep in mind that if the node you're operating on has child nodes you must
either transform the child nodes yourself or call the :meth:`generic_visit`
*source* is the source string; *filename* is the optional filename from which
source was read, defaulting to ``'<input>'``; and *symbol* is the optional
- grammar start symbol, which should be either ``'single'`` (the default) or
- ``'eval'``.
+ grammar start symbol, which should be ``'single'`` (the default), ``'eval'``
+ or ``'exec'``.
Returns a code object (the same as ``compile(source, filename, symbol)``) if the
command is complete and valid; ``None`` if the command is incomplete; raises
:exc:`OverflowError` or :exc:`ValueError` if there is an invalid literal.
The *symbol* argument determines whether *source* is compiled as a statement
- (``'single'``, the default) or as an :term:`expression` (``'eval'``). Any
- other value will cause :exc:`ValueError` to be raised.
+ (``'single'``, the default), as a sequence of statements (``'exec'``) or
+ as an :term:`expression` (``'eval'``). Any other value will
+ cause :exc:`ValueError` to be raised.
.. note::
If a row has more fields than fieldnames, the remaining data is put in a
list and stored with the fieldname specified by *restkey* (which defaults
to ``None``). If a non-blank row has fewer fields than fieldnames, the
- missing values are filled-in with ``None``.
+ missing values are filled-in with the value of *restval* (which defaults
+ to ``None``).
All other optional or keyword arguments are passed to the underlying
:class:`reader` instance.
The member variables to use in these generated methods are defined
using :pep:`526` type annotations. For example this code::
+ from dataclasses import dataclass
+
@dataclass
class InventoryItem:
'''Class for keeping track of an item in inventory.'''
A. Yes. In the CPython and PyPy3 implementations, the C/CFFI versions of
the decimal module integrate the high speed `libmpdec
<https://www.bytereef.org/mpdecimal/doc/libmpdec/index.html>`_ library for
-arbitrary precision correctly-rounded decimal floating point arithmetic [#]_.
+arbitrary precision correctly-rounded decimal floating point arithmetic.
``libmpdec`` uses `Karatsuba multiplication
<https://en.wikipedia.org/wiki/Karatsuba_algorithm>`_
for medium-sized numbers and the `Number Theoretic Transform
<https://en.wikipedia.org/wiki/Discrete_Fourier_transform_(general)#Number-theoretic_transform>`_
-for very large numbers.
+for very large numbers. However, to realize this performance gain, the
+context needs to be set for unrounded calculations.
-The context must be adapted for exact arbitrary precision arithmetic. :attr:`Emin`
-and :attr:`Emax` should always be set to the maximum values, :attr:`clamp`
-should always be 0 (the default). Setting :attr:`prec` requires some care.
+ >>> c = getcontext()
+ >>> c.prec = MAX_PREC
+ >>> c.Emax = MAX_EMAX
+ >>> c.Emin = MIN_EMIN
-The easiest approach for trying out bignum arithmetic is to use the maximum
-value for :attr:`prec` as well [#]_::
-
- >>> setcontext(Context(prec=MAX_PREC, Emax=MAX_EMAX, Emin=MIN_EMIN))
- >>> x = Decimal(2) ** 256
- >>> x / 128
- Decimal('904625697166532776746648320380374280103671755200316906558262375061821325312')
-
-
-For inexact results, :attr:`MAX_PREC` is far too large on 64-bit platforms and
-the available memory will be insufficient::
-
- >>> Decimal(1) / 3
- Traceback (most recent call last):
- File "<stdin>", line 1, in <module>
- MemoryError
-
-On systems with overallocation (e.g. Linux), a more sophisticated approach is to
-adjust :attr:`prec` to the amount of available RAM. Suppose that you have 8GB of
-RAM and expect 10 simultaneous operands using a maximum of 500MB each::
-
- >>> import sys
- >>>
- >>> # Maximum number of digits for a single operand using 500MB in 8-byte words
- >>> # with 19 digits per word (4-byte and 9 digits for the 32-bit build):
- >>> maxdigits = 19 * ((500 * 1024**2) // 8)
- >>>
- >>> # Check that this works:
- >>> c = Context(prec=maxdigits, Emax=MAX_EMAX, Emin=MIN_EMIN)
- >>> c.traps[Inexact] = True
- >>> setcontext(c)
- >>>
- >>> # Fill the available precision with nines:
- >>> x = Decimal(0).logical_invert() * 9
- >>> sys.getsizeof(x)
- 524288112
- >>> x + 2
- Traceback (most recent call last):
- File "<stdin>", line 1, in <module>
- decimal.Inexact: [<class 'decimal.Inexact'>]
-
-In general (and especially on systems without overallocation), it is recommended
-to estimate even tighter bounds and set the :attr:`Inexact` trap if all calculations
-are expected to be exact.
-
-
-.. [#]
- .. versionadded:: 3.3
-
-.. [#]
- .. versionchanged:: 3.9
- This approach now works for all exact results except for non-integer powers.
- Also backported to 3.7 and 3.8.
+.. versionadded:: 3.3
\ No newline at end of file
.. function:: findlabels(code)
- Detect all offsets in the code object *code* which are jump targets, and
+ Detect all offsets in the raw compiled bytecode string *code* which are jump targets, and
return a list of these offsets.
.. opcode:: BUILD_CONST_KEY_MAP (count)
- The version of :opcode:`BUILD_MAP` specialized for constant keys. *count*
- values are consumed from the stack. The top element on the stack contains
- a tuple of keys.
+ The version of :opcode:`BUILD_MAP` specialized for constant keys. Pops the
+ top element on the stack which contains a tuple of keys, then starting from
+ ``TOS1``, pops *count* values to form values in the built dictionary.
.. versionadded:: 3.6
.. versionadded:: 3.7
-.. opcode:: MAKE_FUNCTION (argc)
+.. opcode:: MAKE_FUNCTION (flags)
Pushes a new function object on the stack. From bottom to top, the consumed
stack must consist of values if the argument carries a specified flag value
the next :class:`int` in sequence with the last :class:`int` provided, but
the way it does this is an implementation detail and may change.
+.. note::
+
+ The :meth:`_generate_next_value_` method must be defined before any members.
+
Iteration
---------
Open a search-and-replace dialog.
Go to Line
- Move cursor to the line number requested and make that line visible.
+ Move the cursor to the beginning of the line requested and make that
+ line visible. A request past the end of the file goes to the end.
+ Clear any selection and update the line and column status.
Show Completions
Open a scrollable list allowing selection of keywords and attributes. See
**Source code:** :source:`Lib/imp.py`
.. deprecated:: 3.4
- The :mod:`imp` package is pending deprecation in favor of :mod:`importlib`.
+ The :mod:`imp` module is deprecated in favor of :mod:`importlib`.
.. index:: statement: import
Only class methods are defined by this class to alleviate the need for
instantiation.
+ .. versionchanged:: 3.4
+ Gained :meth:`~Loader.create_module` and :meth:`~Loader.exec_module`
+ methods.
+
.. class:: WindowsRegistryFinder
--------------
-This module allows a Python program to determine if a string is a keyword.
+This module allows a Python program to determine if a string is a
+:ref:`keyword <keywords>`.
.. function:: iskeyword(s)
- Return ``True`` if *s* is a Python keyword.
+ Return ``True`` if *s* is a Python :ref:`keyword <keywords>`.
.. data:: kwlist
- Sequence containing all the keywords defined for the interpreter. If any
- keywords are defined to only be active when particular :mod:`__future__`
- statements are in effect, these will be included as well.
+ Sequence containing all the :ref:`keywords <keywords>` defined for the
+ interpreter. If any keywords are defined to only be active when particular
+ :mod:`__future__` statements are in effect, these will be included as well.
automatically started to view the file.
The mailcap format is documented in :rfc:`1524`, "A User Agent Configuration
-Mechanism For Multimedia Mail Format Information," but is not an Internet
+Mechanism For Multimedia Mail Format Information", but is not an Internet
standard. However, mailcap files are supported on most Unix systems.
>>> def f(x):
... return x*x
...
- >>> p.map(f, [1,2,3])
+ >>> with p:
+ ... p.map(f, [1,2,3])
Process PoolWorker-1:
Process PoolWorker-2:
Process PoolWorker-3:
Note that the methods of the pool object should only be called by
the process which created the pool.
+ .. warning::
+ :class:`multiprocessing.pool.Pool` objects have internal resources that need to be
+ properly managed (like any other resource) by using the pool as a context manager
+ or by calling :meth:`close` and :meth:`terminate` manually. Failure to do this
+ can lead to the process hanging on finalization.
+
+ Note that it is **not correct** to rely on the garbage collector to destroy the pool
+ as CPython does not assure that the finalizer of the pool will be called
+ (see :meth:`object.__del__` for more information).
+
.. versionadded:: 3.2
*maxtasksperchild*
Return ``True`` if a core dump was generated for the process, otherwise
return ``False``.
+ This function should be employed only if :func:`WIFSIGNALED` is true.
+
.. availability:: Unix.
.. function:: WIFCONTINUED(status)
- Return ``True`` if the process has been continued from a job control stop,
- otherwise return ``False``.
+ Return ``True`` if a stopped child has been resumed by delivery of
+ :data:`~signal.SIGCONT` (if the process has been continued from a job
+ control stop), otherwise return ``False``.
+
+ See :data:`WCONTINUED` option.
.. availability:: Unix.
.. function:: WIFSTOPPED(status)
- Return ``True`` if the process has been stopped, otherwise return
- ``False``.
+ Return ``True`` if the process was stopped by delivery of a signal,
+ otherwise return ``False``.
- .. availability:: Unix.
+ :func:`WIFSTOPPED` only returns ``True`` if the :func:`waitpid` call was
+ done using :data:`WUNTRACED` option or when the process is being traced (see
+ :manpage:`ptrace(2)`).
+ .. availability:: Unix.
.. function:: WIFSIGNALED(status)
- Return ``True`` if the process exited due to a signal, otherwise return
+ Return ``True`` if the process was terminated by a signal, otherwise return
``False``.
.. availability:: Unix.
.. function:: WIFEXITED(status)
- Return ``True`` if the process exited using the :manpage:`exit(2)` system call,
+ Return ``True`` if the process terminated normally, that is,
+ by calling ``exit()`` or ``_exit()``, or by returning from ``main()``;
otherwise return ``False``.
.. availability:: Unix.
.. function:: WEXITSTATUS(status)
- If ``WIFEXITED(status)`` is true, return the integer parameter to the
- :manpage:`exit(2)` system call. Otherwise, the return value is meaningless.
+ Return the process exit status.
+
+ This function should be employed only if :func:`WIFEXITED` is true.
.. availability:: Unix.
Return the signal which caused the process to stop.
+ This function should be employed only if :func:`WIFSTOPPED` is true.
+
.. availability:: Unix.
.. function:: WTERMSIG(status)
- Return the signal which caused the process to exit.
+ Return the number of the signal that caused the process to terminate.
+
+ This function should be employed only if :func:`WIFSIGNALED` is true.
.. availability:: Unix.
for generating bindings for C++ libraries as Python classes, and
is specifically designed for Python.
- `PySide <https://wiki.qt.io/PySide>`_
- PySide is a newer binding to the Qt toolkit, provided by Nokia.
- Compared to PyQt, its licensing scheme is friendlier to non-open source
- applications.
+ `PySide2 <https://doc.qt.io/qtforpython/>`_
+ Also known as the Qt for Python project, PySide2 is a newer binding to the
+ Qt toolkit. It is provided by The Qt Company and aims to provide a
+ complete port of PySide to Qt 5. Compared to PyQt, its licensing scheme is
+ friendlier to non-open source applications.
`wxPython <https://www.wxpython.org>`_
wxPython is a cross-platform GUI toolkit for Python that is built around
an XML-based resource format and more, including an ever growing library
of user-contributed modules.
-PyGTK, PyQt, and wxPython, all have a modern look and feel and more
+PyGTK, PyQt, PySide2, and wxPython, all have a modern look and feel and more
widgets than Tkinter. In addition, there are many other GUI toolkits for
Python, both cross-platform, and platform-specific. See the `GUI Programming
<https://wiki.python.org/moin/GuiProgramming>`_ page in the Python Wiki for a
>>> PurePath('a/b.py').match('/*.py')
False
- As with other methods, case-sensitivity is observed::
+ As with other methods, case-sensitivity follows platform defaults::
+ >>> PurePosixPath('b.py').match('*.PY')
+ False
>>> PureWindowsPath('b.py').match('*.PY')
True
instances of :class:`~datetime.datetime`, :class:`~datetime.date` and
:class:`~datetime.time` pickled by Python 2.
-.. function:: loads(bytes_object, \*, fix_imports=True, encoding="ASCII", errors="strict")
+.. function:: loads(data, \*, fix_imports=True, encoding="ASCII", errors="strict")
Return the reconstituted object hierarchy of the pickled representation
- *bytes_object* of an object.
+ *data* of an object. *data* must be a :term:`bytes-like object`.
The protocol version of the pickle is detected automatically, so no
protocol argument is needed. Bytes past the pickled representation
At unpickling time, some methods like :meth:`__getattr__`,
:meth:`__getattribute__`, or :meth:`__setattr__` may be called upon the
instance. In case those methods rely on some internal invariant being
- true, the type should implement :meth:`__getnewargs__` or
- :meth:`__getnewargs_ex__` to establish such an invariant; otherwise,
- neither :meth:`__new__` nor :meth:`__init__` will be called.
+ true, the type should implement :meth:`__new__` to establish such an
+ invariant, as :meth:`__init__` is not called when unpickling an
+ instance.
.. index:: pair: copy; protocol
.. index:: builtin: eval
- Determine if the formatted representation of *object* is "readable," or can be
+ Determine if the formatted representation of *object* is "readable", or can be
used to reconstruct the value using :func:`eval`. This always returns ``False``
for recursive objects.
.. function:: select(rlist, wlist, xlist[, timeout])
This is a straightforward interface to the Unix :c:func:`select` system call.
- The first three arguments are sequences of 'waitable objects': either
+ The first three arguments are iterables of 'waitable objects': either
integers representing file descriptors or objects with a parameterless method
named :meth:`~io.IOBase.fileno` returning such an integer:
* *xlist*: wait for an "exceptional condition" (see the manual page for what
your system considers such a condition)
- Empty sequences are allowed, but acceptance of three empty sequences is
+ Empty iterables are allowed, but acceptance of three empty iterables is
platform-dependent. (It is known to work on Unix but not on Windows.) The
optional *timeout* argument specifies a time-out as a floating point number
in seconds. When the *timeout* argument is omitted the function blocks until
single: socket() (in module socket)
single: popen() (in module os)
- Among the acceptable object types in the sequences are Python :term:`file
+ Among the acceptable object types in the iterables are Python :term:`file
objects <file object>` (e.g. ``sys.stdin``, or objects returned by
:func:`open` or :func:`os.popen`), socket objects returned by
:func:`socket.socket`. You may also define a :dfn:`wrapper` class yourself,
available), or "xztar" (if the :mod:`lzma` module is available).
*root_dir* is a directory that will be the root directory of the
- archive; for example, we typically chdir into *root_dir* before creating the
- archive.
+ archive, all paths in the archive will be relative to it; for example,
+ we typically chdir into *root_dir* before creating the archive.
*base_dir* is the directory where we start archiving from;
i.e. *base_dir* will be the common prefix of all files and
- directories in the archive.
+ directories in the archive. *base_dir* must be given relative
+ to *root_dir*. See :ref:`shutil-archiving-example-with-basedir` for how to
+ use *base_dir* and *root_dir* together.
*root_dir* and *base_dir* both default to the current directory.
-rw-r--r-- tarek/staff 37192 2010-02-06 18:23:10 ./known_hosts
+.. _shutil-archiving-example-with-basedir:
+
+Archiving example with *base_dir*
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In this example, similar to the `one above <shutil-archiving-example_>`_,
+we show how to use :func:`make_archive`, but this time with the usage of
+*base_dir*. We now have the following directory structure:
+
+.. code-block:: shell-session
+
+ $ tree tmp
+ tmp
+ └── root
+ └── structure
+ ├── content
+ └── please_add.txt
+ └── do_not_add.txt
+
+In the final archive, :file:`please_add.txt` should be included, but
+:file:`do_not_add.txt` should not. Therefore we use the following::
+
+ >>> from shutil import make_archive
+ >>> import os
+ >>> archive_name = os.path.expanduser(os.path.join('~', 'myarchive'))
+ >>> make_archive(
+ ... archive_name,
+ ... 'tar',
+ ... root_dir='tmp/root',
+ ... base_dir='structure/content',
+ ... )
+ '/Users/tarek/myarchive.tar'
+
+Listing the files in the resulting archive gives us:
+
+.. code-block:: shell-session
+
+ $ python -m tarfile -l /Users/tarek/myarchive.tar
+ structure/content/
+ structure/content/please_add.txt
+
+
Querying the size of the output terminal
----------------------------------------
signal.
+.. data:: SIGABRT
+
+ Abort signal from :manpage:`abort(3)`.
+
+.. data:: SIGALRM
+
+ Timer signal from :manpage:`alarm(2)`.
+
+ .. availability:: Unix.
+
+.. data:: SIGBREAK
+
+ Interrupt from keyboard (CTRL + BREAK).
+
+ .. availability:: Windows.
+
+.. data:: SIGBUS
+
+ Bus error (bad memory access).
+
+ .. availability:: Unix.
+
+.. data:: SIGCHLD
+
+ Child process stopped or terminated.
+
+ .. availability:: Unix.
+
+.. data:: SIGCLD
+
+ Alias to :data:`SIGCHLD`.
+
+.. data:: SIGCONT
+
+ Continue the process if it is currently stopped.
+
+ .. availability:: Unix.
+
+.. data:: SIGFPE
+
+ Floating-point exception. For example, division by zero.
+
+ .. seealso::
+ :exc:`ZeroDivisionError` is raised when the second argument of a division
+ or modulo operation is zero.
+
+.. data:: SIGHUP
+
+ Hangup detected on controlling terminal or death of controlling process.
+
+ .. availability:: Unix.
+
+.. data:: SIGILL
+
+ Illegal instruction.
+
+.. data:: SIGINT
+
+ Interrupt from keyboard (CTRL + C).
+
+ Default action is to raise :exc:`KeyboardInterrupt`.
+
+.. data:: SIGKILL
+
+ Kill signal.
+
+ It cannot be caught, blocked, or ignored.
+
+ .. availability:: Unix.
+
+.. data:: SIGPIPE
+
+ Broken pipe: write to pipe with no readers.
+
+ Default action is to ignore the signal.
+
+ .. availability:: Unix.
+
+.. data:: SIGSEGV
+
+ Segmentation fault: invalid memory reference.
+
+.. data:: SIGTERM
+
+ Termination signal.
+
+.. data:: SIGUSR1
+
+ User-defined signal 1.
+
+ .. availability:: Unix.
+
+.. data:: SIGUSR2
+
+ User-defined signal 2.
+
+ .. availability:: Unix.
+
+.. data:: SIGWINCH
+
+ Window resize signal.
+
+ .. availability:: Unix.
+
.. data:: SIG*
All the signal numbers are defined symbolically. For example, the hangup signal
For example, ``signal.pthread_sigmask(signal.SIG_BLOCK, [])`` reads the
signal mask of the calling thread.
+ :data:`SIGKILL` and :data:`SIGSTOP` cannot be blocked.
+
.. availability:: Unix. See the man page :manpage:`sigprocmask(3)` and
:manpage:`pthread_sigmask(3)` for further information.
.. method:: shutdown()
Tell the :meth:`serve_forever` loop to stop and wait until it does.
+ :meth:`shutdown` must be called while :meth:`serve_forever` is running in a
+ different thread, otherwise it will deadlock.
.. method:: server_close()
that 'mytype' is the type of the column. It will try to find an entry of
'mytype' in the converters dictionary and then use the converter function found
there to return the value. The column name found in :attr:`Cursor.description`
- is only the first word of the column name, i. e. if you use something like
- ``'as "x [datetime]"'`` in your SQL, then we will parse out everything until the
- first blank for the column name: the column name would simply be "x".
+ does not include the type, i. e. if you use something like
+ ``'as "Expiration date [datetime]"'`` in your SQL, then we will parse out
+ everything until the first ``'['`` for the column name and strip
+ the preceding space: the column name would simply be "Expiration date".
.. function:: connect(database[, timeout, detect_types, isolation_level, check_same_thread, factory, cached_statements, uri])
:class:`socket.socket` type, and provides a socket-like wrapper that also
encrypts and decrypts the data going over the socket with SSL. It supports
additional methods such as :meth:`getpeercert`, which retrieves the
-certificate of the other side of the connection, and :meth:`cipher`,which
+certificate of the other side of the connection, and :meth:`cipher`, which
retrieves the cipher being used for the secure connection.
For more sophisticated applications, the :class:`ssl.SSLContext` class
(('postalCode', '03894-4801'),),
(('countryName', 'US'),),
(('stateOrProvinceName', 'NH'),),
- (('localityName', 'Wolfeboro,'),),
+ (('localityName', 'Wolfeboro'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'www.python.org'),)),
'subjectAltName': (('DNS', 'www.python.org'),
Negative shift counts are illegal and cause a :exc:`ValueError` to be raised.
(2)
- A left shift by *n* bits is equivalent to multiplication by ``pow(2, n)``
- without overflow check.
+ A left shift by *n* bits is equivalent to multiplication by ``pow(2, n)``.
(3)
- A right shift by *n* bits is equivalent to division by ``pow(2, n)`` without
- overflow check.
+ A right shift by *n* bits is equivalent to floor division by ``pow(2, n)``.
(4)
Performing these calculations with at least one extra sign extension bit in
.. function:: run(args, *, stdin=None, input=None, stdout=None, stderr=None,\
capture_output=False, shell=False, cwd=None, timeout=None, \
check=False, encoding=None, errors=None, text=None, env=None, \
- universal_newlines=None)
+ universal_newlines=None, **other_popen_kwargs)
Run the command described by *args*. Wait for command to complete, then
return a :class:`CompletedProcess` instance.
.. method:: Popen.terminate()
- Stop the child. On Posix OSs the method sends SIGTERM to the
+ Stop the child. On POSIX OSs the method sends SIGTERM to the
child. On Windows the Win32 API function :c:func:`TerminateProcess` is called
to stop the child.
.. method:: Popen.kill()
- Kills the child. On Posix OSs the function sends SIGKILL to the child.
+ Kills the child. On POSIX OSs the function sends SIGKILL to the child.
On Windows :meth:`kill` is an alias for :meth:`terminate`.
subprocess. You can now use :func:`run` in many cases, but lots of existing code
calls these functions.
-.. function:: call(args, *, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, timeout=None)
+.. function:: call(args, *, stdin=None, stdout=None, stderr=None, \
+ shell=False, cwd=None, timeout=None, **other_popen_kwargs)
Run the command described by *args*. Wait for command to complete, then
return the :attr:`~Popen.returncode` attribute.
.. versionchanged:: 3.3
*timeout* was added.
-.. function:: check_call(args, *, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, timeout=None)
+.. function:: check_call(args, *, stdin=None, stdout=None, stderr=None, \
+ shell=False, cwd=None, timeout=None, \
+ **other_popen_kwargs)
Run command with arguments. Wait for command to complete. If the return
code was zero then return, otherwise raise :exc:`CalledProcessError`. The
.. function:: check_output(args, *, stdin=None, stderr=None, shell=False, \
cwd=None, encoding=None, errors=None, \
- universal_newlines=None, timeout=None, text=None)
+ universal_newlines=None, timeout=None, text=None, \
+ **other_popen_kwargs)
Run command with arguments and return its output.
Python currently supports seven schemes:
-- *posix_prefix*: scheme for Posix platforms like Linux or Mac OS X. This is
+- *posix_prefix*: scheme for POSIX platforms like Linux or Mac OS X. This is
the default scheme used when Python or a component is installed.
-- *posix_home*: scheme for Posix platforms used when a *home* option is used
+- *posix_home*: scheme for POSIX platforms used when a *home* option is used
upon installation. This scheme is used when a component is installed through
Distutils with a specific home prefix.
-- *posix_user*: scheme for Posix platforms used when a component is installed
+- *posix_user*: scheme for POSIX platforms used when a component is installed
through Distutils and the *user* option is used. This scheme defines paths
located under the user home directory.
- *nt*: scheme for NT platforms like Windows.
import tarfile
tar = tarfile.open("sample.tar.gz", "r:gz")
for tarinfo in tar:
- print(tarinfo.name, "is", tarinfo.size, "bytes in size and is", end="")
+ print(tarinfo.name, "is", tarinfo.size, "bytes in size and is ", end="")
if tarinfo.isreg():
print("a regular file.")
elif tarinfo.isdir():
:attr:`fix_sentence_endings` is false by default.
Since the sentence detection algorithm relies on ``string.lowercase`` for
- the definition of "lowercase letter," and a convention of using two spaces
+ the definition of "lowercase letter", and a convention of using two spaces
after a period to separate sentences on the same line, it is specific to
English-language texts.
similarly.
If :option:`-n` is not given, a suitable number of loops is calculated by trying
-successive powers of 10 until the total time is at least 0.2 seconds.
+increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the total
+time is at least 0.2 seconds.
:func:`default_timer` measurements can be affected by other programs running on
the same machine, so the best thing to do when accurate timing is necessary is
The :mod:`tokenize` module provides a lexical scanner for Python source code,
implemented in Python. The scanner in this module returns comments as tokens
-as well, making it useful for implementing "pretty-printers," including
+as well, making it useful for implementing "pretty-printers", including
colorizers for on-screen displays.
To simplify token stream handling, all :ref:`operator <operators>` and
print("Top %s lines" % limit)
for index, stat in enumerate(top_stats[:limit], 1):
frame = stat.traceback[0]
- # replace "/path/to/module/file.py" with "module/file.py"
- filename = os.sep.join(frame.filename.split(os.sep)[-2:])
print("#%s: %s:%s: %.1f KiB"
- % (index, filename, frame.lineno, stat.size / 1024))
+ % (index, frame.filename, frame.lineno, stat.size / 1024))
line = linecache.getline(frame.filename, frame.lineno).strip()
if line:
print(' %s' % line)
.. versionadded:: 3.1
- .. method:: assertIn(first, second, msg=None)
- assertNotIn(first, second, msg=None)
+ .. method:: assertIn(member, container, msg=None)
+ assertNotIn(member, container, msg=None)
- Test that *first* is (or is not) in *second*.
+ Test that *member* is (or is not) in *container*.
.. versionadded:: 3.1
application without adding attributes to those objects. This can be especially
useful with objects that override attribute accesses.
- .. note::
-
- Caution: Because a :class:`WeakKeyDictionary` is built on top of a Python
- dictionary, it must not change size when iterating over it. This can be
- difficult to ensure for a :class:`WeakKeyDictionary` because actions
- performed by the program during iteration may cause items in the
- dictionary to vanish "by magic" (as a side effect of garbage collection).
:class:`WeakKeyDictionary` objects have an additional method that
exposes the internal references directly. The references are not guaranteed to
Mapping class that references values weakly. Entries in the dictionary will be
discarded when no strong reference to the value exists any more.
- .. note::
-
- Caution: Because a :class:`WeakValueDictionary` is built on top of a Python
- dictionary, it must not change size when iterating over it. This can be
- difficult to ensure for a :class:`WeakValueDictionary` because actions performed
- by the program during iteration may cause items in the dictionary to vanish "by
- magic" (as a side effect of garbage collection).
:class:`WeakValueDictionary` objects have an additional method that has the
same issues as the :meth:`keyrefs` method of :class:`WeakKeyDictionary`
The same can be done using the :func:`create_archive` function::
>>> import zipapp
- >>> zipapp.create_archive('myapp.pyz', 'myapp')
+ >>> zipapp.create_archive('myapp', 'myapp.pyz')
To make the application directly executable on POSIX, specify an interpreter
to use.
%PYTHON% -c "import sphinx" > nul 2> nul\r
if errorlevel 1 (\r
echo Installing sphinx with %PYTHON%\r
- %PYTHON% -m pip install sphinx\r
+ %PYTHON% -m pip install sphinx==2.2.0\r
if errorlevel 1 exit /B\r
)\r
set SPHINXBUILD=%PYTHON% -c "import sphinx.cmd.build, sys; sys.exit(sphinx.cmd.build.main())"\r
:dfn:`Objects` are Python's abstraction for data. All data in a Python program
is represented by objects or by relations between objects. (In a sense, and in
-conformance to Von Neumann's model of a "stored program computer," code is also
+conformance to Von Neumann's model of a "stored program computer", code is also
represented by objects.)
.. index::
equal (e.g., ``1`` and ``1.0``) then they can be used interchangeably to index
the same dictionary entry.
+ Dictionaries preserve insertion order, meaning that keys will be produced
+ in the same order they were added sequentially over the dictionary.
+ Replacing an existing key does not change the order, however removing a key
+ and re-inserting it will add it to the end instead of keeping its old place.
+
Dictionaries are mutable; they can be created by the ``{...}`` notation (see
section :ref:`dict`).
additional examples of mapping types, as does the :mod:`collections`
module.
+ .. versionchanged:: 3.7
+ Dictionaries did not preserve insertion order in versions of Python before 3.6.
+ In CPython 3.6, insertion order was preserved, but it was considered
+ an implementation detail at that time rather than a language guarantee.
+
Callable types
.. index::
object: callable
.. index:: pair: arithmetic; conversion
When a description of an arithmetic operator below uses the phrase "the numeric
-arguments are converted to a common type," this means that the operator
+arguments are converted to a common type", this means that the operator
implementation for built-in types works as follows:
* If either argument is a complex number, the other is converted to complex;
The not-a-number values ``float('NaN')`` and ``decimal.Decimal('NaN')`` are
special. Any ordered comparison of a number to a not-a-number value is false.
A counter-intuitive implication is that not-a-number values are not equal to
- themselves. For example, if ``x = float('NaN')``, ``3 < x``, ``x < 3``, ``x
- == x``, ``x != x`` are all false. This behavior is compliant with IEEE 754.
+ themselves. For example, if ``x = float('NaN')``, ``3 < x``, ``x < 3`` and
+ ``x == x`` are all false, while ``x != x`` is true. This behavior is
+ compliant with IEEE 754.
* Binary sequences (instances of :class:`bytes` or :class:`bytearray`) can be
compared within and across their types. They compare lexicographically using
A non-normative HTML file listing all valid identifier characters for Unicode
4.1 can be found at
-https://www.dcl.hpi.uni-potsdam.de/home/loewis/table-3131.html.
+https://www.unicode.org/Public/13.0.0/ucd/DerivedCoreProperties.txt
.. _keywords:
information on this convention.
``__*__``
- System-defined names. These names are defined by the interpreter and its
- implementation (including the standard library). Current system names are
- discussed in the :ref:`specialnames` section and elsewhere. More will likely
- be defined in future versions of Python. *Any* use of ``__*__`` names, in
- any context, that does not follow explicitly documented use, is subject to
+ System-defined names, informally known as "dunder" names. These names are
+ defined by the interpreter and its implementation (including the standard library).
+ Current system names are discussed in the :ref:`specialnames` section and elsewhere.
+ More will likely be defined in future versions of Python. *Any* use of ``__*__`` names,
+ in any context, that does not follow explicitly documented use, is subject to
breakage without warning.
``__*``
final_argument_whitespace = True
option_spec = {}
- _label = 'Deprecated since version {deprecated}, will be removed in version {removed}'
+ _deprecated_label = 'Deprecated since version {deprecated}, will be removed in version {removed}'
+ _removed_label = 'Deprecated since version {deprecated}, removed in version {removed}'
def run(self):
node = addnodes.versionmodified()
node['type'] = 'deprecated-removed'
version = (self.arguments[0], self.arguments[1])
node['version'] = version
- label = translators['sphinx'].gettext(self._label)
+ env = self.state.document.settings.env
+ current_version = tuple(int(e) for e in env.config.version.split('.'))
+ removed_version = tuple(int(e) for e in self.arguments[1].split('.'))
+ if current_version < removed_version:
+ label = self._deprecated_label
+ else:
+ label = self._removed_label
+
+ label = translators['sphinx'].gettext(label)
text = label.format(deprecated=self.arguments[0], removed=self.arguments[1])
if len(self.arguments) == 3:
inodes, messages = self.state.inline_text(self.arguments[2],
'(?:release/\\d.\\d[\\x\\d\\.]*)'];
var all_versions = {
- '3.9': 'dev (3.9)',
+ '3.10': 'dev (3.10)',
+ '3.9': 'pre (3.9)',
'3.8': '3.8',
'3.7': '3.7',
'3.6': '3.6',
<p>These archives contain all the content in the documentation.</p>
-<p>HTML Help (<tt>.chm</tt>) files are made available in the "Windows" section
+<p>HTML Help (<code>.chm</code>) files are made available in the "Windows" section
on the <a href="https://www.python.org/downloads/release/python-{{ release.replace('.', '') }}/">Python
download page</a>.</p>
{% trans %}CPython implementation detail:{% endtrans %}
{% trans %}Deprecated since version {deprecated}, will be removed in version {removed}{% endtrans %}
+{% trans %}Deprecated since version {deprecated}, removed in version {removed}{% endtrans %}
<p><a href="{{ pathto('download') }}">{% trans %}Download these documents{% endtrans %}</a></p>
<h3>{% trans %}Docs by version{% endtrans %}</h3>
<ul>
- <li><a href="https://docs.python.org/3.9/">{% trans %}Python 3.9 (in development){% endtrans %}</a></li>
+ <li><a href="https://docs.python.org/3.10/">{% trans %}Python 3.10 (in development){% endtrans %}</a></li>
+ <li><a href="https://docs.python.org/3.9/">{% trans %}Python 3.9 (pre-release){% endtrans %}</a></li>
<li><a href="https://docs.python.org/3.8/">{% trans %}Python 3.8 (stable){% endtrans %}</a></li>
<li><a href="https://docs.python.org/3.7/">{% trans %}Python 3.7 (stable){% endtrans %}</a></li>
<li><a href="https://docs.python.org/3.6/">{% trans %}Python 3.6 (security-fixes){% endtrans %}</a></li>
Now what can we do with instance objects? The only operations understood by
instance objects are attribute references. There are two kinds of valid
-attribute names, data attributes and methods.
+attribute names: data attributes and methods.
*data attributes* correspond to "instance variables" in Smalltalk, and to "data
members" in C++. Data attributes need not be declared; like local variables,
If you have a really long format string that you don't want to split up, it
would be nice if you could reference the variables to be formatted by name
instead of by position. This can be done by simply passing the dict and using
-square brackets ``'[]'`` to access the keys ::
+square brackets ``'[]'`` to access the keys. ::
>>> table = {'Sjoerd': 4127, 'Jack': 4098, 'Dcab': 8637678}
>>> print('Jack: {0[Jack]:d}; Sjoerd: {0[Sjoerd]:d}; '
Old string formatting
---------------------
-The ``%`` operator can also be used for string formatting. It interprets the
-left argument much like a :c:func:`sprintf`\ -style format string to be applied
-to the right argument, and returns the string resulting from this formatting
-operation. For example::
+The % operator (modulo) can also be used for string formatting. Given ``'string'
+% values``, instances of ``%`` in ``string`` are replaced with zero or more
+elements of ``values``. This operation is commonly known as string
+interpolation. For example::
>>> import math
>>> print('The value of pi is approximately %5.3f.' % math.pi)
* A :file:`Python 3.7` folder in your :file:`Applications` folder. In here
you find IDLE, the development environment that is a standard part of official
- Python distributions; PythonLauncher, which handles double-clicking Python
- scripts from the Finder; and the "Build Applet" tool, which allows you to
- package Python scripts as standalone applications on your system.
+ Python distributions; and PythonLauncher, which handles double-clicking Python
+ scripts from the Finder.
* A framework :file:`/Library/Frameworks/Python.framework`, which includes the
Python executable and libraries. The installer adds this location to your shell
Distributing Python Applications on the Mac
===========================================
-The "Build Applet" tool that is placed in the MacPython 3.6 folder is fine for
-packaging small Python scripts on your own machine to run as a standard Mac
-application. This tool, however, is not robust enough to distribute Python
-applications to other users.
-
The standard tool for deploying standalone Python applications on the Mac is
:program:`py2app`. More information on installing and using py2app can be found
at http://undefined.org/python/#py2app.
to ``1``.
This allows the :func:`open` function, the :mod:`os` module and most other
-path functionality to accept and return paths longer than 260 characters when
-using strings. (Use of bytes as paths is deprecated on Windows, and this feature
-is not available when using bytes.)
+path functionality to accept and return paths longer than 260 characters.
After changing the above option, no further configuration is required.
Types themselves are represented as objects; an object contains a
pointer to the corresponding type object. The type itself has a type
pointer pointing to the object representing the type 'type', which
-contains a pointer to itself!).
+contains a pointer to itself!.
Objects do not float around in memory; once allocated an object keeps
the same size and address. Objects that must hold variable-size data
/*--start constants--*/
#define PY_MAJOR_VERSION 3
#define PY_MINOR_VERSION 7
-#define PY_MICRO_VERSION 7
+#define PY_MICRO_VERSION 8
#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL
#define PY_RELEASE_SERIAL 0
/* Version as a string */
-#define PY_VERSION "3.7.7"
+#define PY_VERSION "3.7.8"
/*--end constants--*/
/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
struct _ts *next;
PyInterpreterState *interp;
+ /* Borrowed reference to the current frame (it can be NULL) */
struct _frame *frame;
int recursion_depth;
char overflowed; /* The stack has overflowed. Allow 50 more calls
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
flags = re.sub(r'-arch\s+\w+\s', ' ', flags, flags=re.ASCII)
- flags = re.sub('-isysroot [^ \t]*', ' ', flags)
+ flags = re.sub(r'-isysroot\s*\S+', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
# to /usr and /System/Library by either a standalone CLT
# package or the CLT component within Xcode.
cflags = _config_vars.get('CFLAGS', '')
- m = re.search(r'-isysroot\s+(\S+)', cflags)
+ m = re.search(r'-isysroot\s*(\S+)', cflags)
if m is not None:
sdk = m.group(1)
if not os.path.exists(sdk):
# Do not alter a config var explicitly overridden by env var
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
- flags = re.sub(r'-isysroot\s+\S+(?:\s|$)', ' ', flags)
+ flags = re.sub(r'-isysroot\s*\S+(?:\s|$)', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
stripArch = stripSysroot = True
else:
stripArch = '-arch' in cc_args
- stripSysroot = '-isysroot' in cc_args
+ stripSysroot = any(arg for arg in cc_args if arg.startswith('-isysroot'))
if stripArch or 'ARCHFLAGS' in os.environ:
while True:
if stripSysroot:
while True:
- try:
- index = compiler_so.index('-isysroot')
+ indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]
+ if not indices:
+ break
+ index = indices[0]
+ if compiler_so[index] == '-isysroot':
# Strip this argument and the next one:
del compiler_so[index:index+2]
- except ValueError:
- break
+ else:
+ # It's '-isysroot/some/path' in one arg
+ del compiler_so[index:index+1]
# Check if the SDK that is used during compilation actually exists,
# the universal build requires the usage of a universal SDK and not all
# users have that installed by default.
sysroot = None
- if '-isysroot' in cc_args:
- idx = cc_args.index('-isysroot')
- sysroot = cc_args[idx+1]
- elif '-isysroot' in compiler_so:
- idx = compiler_so.index('-isysroot')
- sysroot = compiler_so[idx+1]
+ argvar = cc_args
+ indices = [i for i,x in enumerate(cc_args) if x.startswith('-isysroot')]
+ if not indices:
+ argvar = compiler_so
+ indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]
+
+ for idx in indices:
+ if argvar[idx] == '-isysroot':
+ sysroot = argvar[idx+1]
+ break
+ else:
+ sysroot = argvar[idx][len('-isysroot'):]
+ break
if sysroot and not os.path.isdir(sysroot):
from distutils import log
class RewriteName(NodeTransformer):
def visit_Name(self, node):
- return copy_location(Subscript(
+ return Subscript(
value=Name(id='data', ctx=Load()),
slice=Index(value=Str(s=node.id)),
ctx=node.ctx
- ), node)
+ )
Keep in mind that if the node you're operating on has child nodes you must
either transform the child nodes yourself or call the :meth:`generic_visit`
try:
# Register a dummy signal handler to ask Python to write the signal
- # number in the wakup file descriptor. _process_self_data() will
+ # number in the wakeup file descriptor. _process_self_data() will
# read signal numbers from this file descriptor to handle signals.
signal.signal(sig, _sighandler_noop)
ctype = "multipart/form-data; boundary={}".format(boundary)
headers = Message()
headers.set_type(ctype)
- headers['Content-Length'] = pdict['CONTENT-LENGTH']
+ try:
+ headers['Content-Length'] = pdict['CONTENT-LENGTH']
+ except KeyError:
+ pass
fs = FieldStorage(fp, headers=headers, encoding=encoding, errors=errors,
environ={'REQUEST_METHOD': 'POST'})
return {k: fs.getlist(k) for k in fs}
last_line_lfend = True
_read = 0
while 1:
- if self.limit is not None and _read >= self.limit:
+
+ if self.limit is not None and 0 <= self.limit <= _read:
break
line = self.fp.readline(1<<16) # bytes
self.bytes_read += len(line)
"""
import __future__
+import warnings
_features = [getattr(__future__, fname)
for fname in __future__.all_feature_names]
except SyntaxError as err:
pass
- try:
- code1 = compiler(source + "\n", filename, symbol)
- except SyntaxError as e:
- err1 = e
+ # Suppress warnings after the first compile to avoid duplication.
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ try:
+ code1 = compiler(source + "\n", filename, symbol)
+ except SyntaxError as e:
+ err1 = e
- try:
- code2 = compiler(source + "\n\n", filename, symbol)
- except SyntaxError as e:
- err2 = e
+ try:
+ code2 = compiler(source + "\n\n", filename, symbol)
+ except SyntaxError as e:
+ err2 = e
try:
if code:
source -- the source string; may contain \n characters
filename -- optional filename from which source was read; default
"<input>"
- symbol -- optional grammar start symbol; "single" (default) or "eval"
+ symbol -- optional grammar start symbol; "single" (default), "exec"
+ or "eval"
Return value / exceptions raised:
self.assertEqual(s.second, check.second)
self.assertEqual(s.third, check.third)
+ def test_callback_too_many_args(self):
+ def func(*args):
+ return len(args)
+
+ CTYPES_MAX_ARGCOUNT = 1024
+ proto = CFUNCTYPE(c_int, *(c_int,) * CTYPES_MAX_ARGCOUNT)
+ cb = proto(func)
+ args1 = (1,) * CTYPES_MAX_ARGCOUNT
+ self.assertEqual(cb(*args1), CTYPES_MAX_ARGCOUNT)
+
+ args2 = (1,) * (CTYPES_MAX_ARGCOUNT + 1)
+ with self.assertRaises(ArgumentError):
+ cb(*args2)
+
+
################################################################
if __name__ == '__main__':
import os
import sys
import unittest
+import warnings
from test.support import run_unittest
def test_suite():
+ old_filters = warnings.filters[:]
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
+ # bpo-40055: Save/restore warnings filters to leave them unchanged.
+ # Importing tests imports docutils which imports pkg_resources which adds a
+ # warnings filter.
+ warnings.filters[:] = old_filters
return suite
# vs
# /usr/lib/libedit.dylib
cflags = sysconfig.get_config_var('CFLAGS')
- m = re.search(r'-isysroot\s+(\S+)', cflags)
+ m = re.search(r'-isysroot\s*(\S+)', cflags)
if m is None:
sysroot = '/'
else:
else:
raise TypeError("Expected a module, string, or None")
+def _newline_convert(data):
+ # We have two cases to cover and we need to make sure we do
+ # them in the right order
+ for newline in ('\r\n', '\r'):
+ data = data.replace(newline, '\n')
+ return data
+
def _load_testfile(filename, package, module_relative, encoding):
if module_relative:
package = _normalize_module(package, 3)
file_contents = file_contents.decode(encoding)
# get_data() opens files as 'rb', so one must do the equivalent
# conversion as universal newlines would do.
- return file_contents.replace(os.linesep, '\n'), filename
+ return _newline_convert(file_contents), filename
with open(filename, encoding=encoding) as f:
return f.read(), filename
if value[0] in WSP:
token, value = get_fws(value)
elif value[:2] == '=?':
+ valid_ew = False
try:
token, value = get_encoded_word(value)
bare_quoted_string.defects.append(errors.InvalidHeaderDefect(
"encoded word inside quoted string"))
+ valid_ew = True
except errors.HeaderParseError:
token, value = get_qcontent(value)
+ # Collapse the whitespace between two encoded words that occur in a
+ # bare-quoted-string.
+ if valid_ew and len(bare_quoted_string) > 1:
+ if (bare_quoted_string[-1].token_type == 'fws' and
+ bare_quoted_string[-2].token_type == 'encoded-word'):
+ bare_quoted_string[-1] = EWWhiteSpaceTerminal(
+ bare_quoted_string[-1], 'fws')
else:
token, value = get_qcontent(value)
bare_quoted_string.append(token)
without any Content Transfer Encoding.
"""
+
+ inputs = ''.join(filter(None, (display_name, username, domain, addr_spec)))
+ if '\r' in inputs or '\n' in inputs:
+ raise ValueError("invalid arguments; address parts cannot contain CR or LF")
+
# This clause with its potential 'raise' may only happen when an
# application program creates an Address object using an addr_spec
# keyword. The email library code itself must always supply username
import os.path
import pkgutil
import sys
+import runpy
import tempfile
__all__ = ["version", "bootstrap"]
-_SETUPTOOLS_VERSION = "41.2.0"
+_SETUPTOOLS_VERSION = "47.1.0"
-_PIP_VERSION = "19.2.3"
+_PIP_VERSION = "20.1.1"
_PROJECTS = [
- ("setuptools", _SETUPTOOLS_VERSION),
- ("pip", _PIP_VERSION),
+ ("setuptools", _SETUPTOOLS_VERSION, "py3"),
+ ("pip", _PIP_VERSION, "py2.py3"),
]
if additional_paths is not None:
sys.path = additional_paths + sys.path
- # Install the bundled software
- import pip._internal
- return pip._internal.main(args)
+ # Invoke pip as if it's the main module, and catch the exit.
+ backup_argv = sys.argv[:]
+ sys.argv[1:] = args
+ try:
+ # run_module() alters sys.modules and sys.argv, but restores them at exit
+ runpy.run_module("pip", run_name="__main__", alter_sys=True)
+ except SystemExit as exc:
+ return exc.code
+ finally:
+ sys.argv[:] = backup_argv
+
+ raise SystemError("pip did not exit, this should never happen")
def version():
# Put our bundled wheels into a temporary directory and construct the
# additional paths that need added to sys.path
additional_paths = []
- for project, version in _PROJECTS:
- wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
+ for project, version, py_tag in _PROJECTS:
+ wheel_name = "{}-{}-{}-none-any.whl".format(project, version, py_tag)
whl = pkgutil.get_data(
"ensurepip",
"_bundled/{}".format(wheel_name),
additional_paths.append(os.path.join(tmpdir, wheel_name))
# Construct the arguments to be passed to the pip command
- args = ["install", "--no-index", "--find-links", tmpdir]
+ args = ["install", "--no-cache-dir", "--no-index", "--find-links", tmpdir]
if root:
args += ["--root", root]
if upgrade:
self._member_names = []
self._last_values = []
self._ignore = []
+ self._auto_called = False
def __setitem__(self, key, value):
"""Changes anything not dundered or not a descriptor.
):
raise ValueError('_names_ are reserved for future Enum use')
if key == '_generate_next_value_':
+ # check if members already defined as auto()
+ if self._auto_called:
+ raise TypeError("_generate_next_value_ must be defined before members")
setattr(self, '_generate_next_value', value)
elif key == '_ignore_':
if isinstance(value, str):
# enum overwriting a descriptor?
raise TypeError('%r already defined as: %r' % (key, self[key]))
if isinstance(value, auto):
+ self._auto_called = True
if value.value == _auto_null:
value.value = self._generate_next_value(key, 1, len(self._member_names), self._last_values[:])
value = value.value
(self.host, self.port) = self._get_hostport(host, port)
+ self._validate_host(self.host)
+
# This is stored as an instance variable to allow unit
# tests to replace it with a suitable mockup
self._create_connection = socket.create_connection
raise InvalidURL(f"URL can't contain control characters. {url!r} "
f"(found at least {match.group()!r})")
+ def _validate_host(self, host):
+ """Validate a host so it doesn't contain control characters."""
+ # Prevent CVE-2019-18348.
+ match = _contains_disallowed_url_pchar_re.search(host)
+ if match:
+ raise InvalidURL(f"URL can't contain control characters. {host!r} "
+ f"(found at least {match.group()!r})")
+
def putheader(self, header, *values):
"""Send a request header line to the server.
--- /dev/null
+The IDLE icons are from https://bugs.python.org/issue1490384
+
+Created by Andrew Clover.
+
+The original sources are available from Andrew's website:
+https://www.doxdesk.com/software/py/pyicons.html
+
+Various formats and sizes are available at this GitHub Pull Request:
+https://github.com/python/cpython/pull/17473
-What's New in IDLE 3.7.6
-Released on 2019-12-16?
+What's New in IDLE 3.7.8
+Released on 2020-06-27?
+======================================
+
+
+bpo-40723: Make test_idle pass when run after import.
+Patch by Florian Dahlitz.
+
+bpo-38689: IDLE will no longer freeze when inspect.signature fails
+when fetching a calltip.
+
+
+What's New in IDLE 3.7.7
+Released on 2020-03-10
======================================
+bpo-27115: For 'Go to Line', use a Query entry box subclass with
+IDLE standard behavior and improved error checking.
+
+bpo-39885: When a context menu is invoked by right-clicking outside
+of a selection, clear the selection and move the cursor. Cut and
+Copy require that the click be within the selection.
+
+bpo-39852: Edit "Go to line" now clears any selection, preventing
+accidental deletion. It also updates Ln and Col on the status bar.
bpo-39781: Selecting code context lines no longer causes a jump.
Remove unneeded arguments and dead code from pyparse
find_good_parse_start method.
+
+What's New in IDLE 3.7.6
+Released on 2019-12-18
+======================================
+
bpo-38943: Fix autocomplete windows not always appearing on some
systems. Patch by Johnny Najera.
import platform
from tkinter import *
-from tkinter.ttk import Frame, Scrollbar
+from tkinter.ttk import Scrollbar
from idlelib.autocomplete import FILES, ATTRS
from idlelib.multicall import MC_SHIFT
empty line or _MAX_LINES. For builtins, this typically includes
the arguments in addition to the return value.
'''
- argspec = default = ""
+ # Determine function object fob to inspect.
try:
ob_call = ob.__call__
- except BaseException:
- return default
-
+ except BaseException: # Buggy user object could raise anything.
+ return '' # No popup for non-callables.
fob = ob_call if isinstance(ob_call, types.MethodType) else ob
+ # Initialize argspec and wrap it to get lines.
try:
argspec = str(inspect.signature(fob))
- except ValueError as err:
+ except Exception as err:
msg = str(err)
if msg.startswith(_invalid_method):
return _invalid_method
+ else:
+ argspec = ''
if '/' in argspec and len(argspec) < _MAX_COLS - len(_argument_positional):
# Add explanation TODO remove after 3.7, before 3.9.
lines = (textwrap.wrap(argspec, _MAX_COLS, subsequent_indent=_INDENT)
if len(argspec) > _MAX_COLS else [argspec] if argspec else [])
+ # Augment lines from docstring, if any, and join to get argspec.
if isinstance(ob_call, types.MethodType):
doc = ob_call.__doc__
else:
line = line[: _MAX_COLS - 3] + '...'
lines.append(line)
argspec = '\n'.join(lines)
- if not argspec:
- argspec = _default_callable_argspec
- return argspec
+
+ return argspec or _default_callable_argspec
if __name__ == '__main__':
"""
Dialog for building Tkinter accelerator key bindings
"""
-from tkinter import Toplevel, Listbox, Text, StringVar, TclError
+from tkinter import Toplevel, Listbox, StringVar, TclError
from tkinter.ttk import Frame, Button, Checkbutton, Entry, Label, Scrollbar
from tkinter import messagebox
import string
"""
import re
-from tkinter import (Toplevel, Listbox, Text, Scale, Canvas,
+from tkinter import (Toplevel, Listbox, Scale, Canvas,
StringVar, BooleanVar, IntVar, TRUE, FALSE,
TOP, BOTTOM, RIGHT, LEFT, SOLID, GROOVE,
NONE, BOTH, X, Y, W, E, EW, NS, NSEW, NW,
rmenu = None
def right_menu_event(self, event):
- self.text.mark_set("insert", "@%d,%d" % (event.x, event.y))
+ text = self.text
+ newdex = text.index(f'@{event.x},{event.y}')
+ try:
+ in_selection = (text.compare('sel.first', '<=', newdex) and
+ text.compare(newdex, '<=', 'sel.last'))
+ except TclError:
+ in_selection = False
+ if not in_selection:
+ text.tag_remove("sel", "1.0", "end")
+ text.mark_set("insert", newdex)
if not self.rmenu:
self.make_rmenu()
rmenu = self.rmenu
self.event = event
iswin = sys.platform[:3] == 'win'
if iswin:
- self.text.config(cursor="arrow")
+ text.config(cursor="arrow")
for item in self.rmenu_specs:
try:
state = getattr(self, verify_state)()
rmenu.entryconfigure(label, state=state)
-
rmenu.tk_popup(event.x_root, event.y_root)
if iswin:
self.text.config(cursor="ibeam")
def goto_line_event(self, event):
text = self.text
- lineno = tkSimpleDialog.askinteger("Goto",
- "Go to line number:",parent=text)
- if lineno is None:
- return "break"
- if lineno <= 0:
- text.bell()
- return "break"
- text.mark_set("insert", "%d.0" % lineno)
- text.see("insert")
+ lineno = query.Goto(
+ text, "Go To Line",
+ "Enter a positive integer\n"
+ "('big' = end of file):"
+ ).result
+ if lineno is not None:
+ text.tag_remove("sel", "1.0", "end")
+ text.mark_set("insert", f'{lineno}.0')
+ text.see("insert")
+ self.set_line_and_column()
return "break"
def open_module(self):
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta charset="utf-8" />
- <title>IDLE — Python 3.9.0a1 documentation</title>
+ <title>IDLE — Python 3.9.0a4 documentation</title>
<link rel="stylesheet" href="../_static/pydoctheme.css" type="text/css" />
<link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
<script type="text/javascript" src="../_static/sidebar.js"></script>
<link rel="search" type="application/opensearchdescription+xml"
- title="Search within Python 3.9.0a1 documentation"
+ title="Search within Python 3.9.0a4 documentation"
href="../_static/opensearch.xml"/>
<link rel="author" title="About these documents" href="../about.html" />
<link rel="index" title="Index" href="../genindex.html" />
<li>
- <a href="../index.html">3.9.0a1 Documentation</a> »
+ <a href="../index.html">3.9.0a4 Documentation</a> »
</li>
<li class="nav-item nav-item-1"><a href="index.html" >The Python Standard Library</a> »</li>
</dd>
<dt>Replace…</dt><dd><p>Open a search-and-replace dialog.</p>
</dd>
-<dt>Go to Line</dt><dd><p>Move cursor to the line number requested and make that line visible.</p>
+<dt>Go to Line</dt><dd><p>Move the cursor to the beginning of the line requested and make that
+line visible. A request past the end of the file goes to the end.
+Clear any selection and update the line and column status.</p>
</dd>
<dt>Show Completions</dt><dd><p>Open a scrollable list allowing selection of keywords and attributes. See
<a class="reference internal" href="#completions"><span class="std std-ref">Completions</span></a> in the Editing and navigation section below.</p>
clash, or cannot or does not want to run as admin, it might be easiest to
completely remove Python and start over.</p>
<p>A zombie pythonw.exe process could be a problem. On Windows, use Task
-Manager to detect and stop one. Sometimes a restart initiated by a program
-crash or Keyboard Interrupt (control-C) may fail to connect. Dismissing
-the error box or Restart Shell on the Shell menu may fix a temporary problem.</p>
+Manager to check for one and stop it if there is. Sometimes a restart
+initiated by a program crash or Keyboard Interrupt (control-C) may fail
+to connect. Dismissing the error box or using Restart Shell on the Shell
+menu may fix a temporary problem.</p>
<p>When IDLE first starts, it attempts to read user configuration files in
<code class="docutils literal notranslate"><span class="pre">~/.idlerc/</span></code> (~ is one’s home directory). If there is a problem, an error
message should be displayed. Leaving aside random disk glitches, this can
-be prevented by never editing the files by hand, using the configuration
-dialog, under Options, instead Options. Once it happens, the solution may
-be to delete one or more of the configuration files.</p>
+be prevented by never editing the files by hand. Instead, use the
+configuration dialog, under Options. Once there is an error in a user
+configuration file, the best solution may be to delete it and start over
+with the settings dialog.</p>
<p>If IDLE quits with no message, and it was not started from a console, try
-starting from a console (<code class="docutils literal notranslate"><span class="pre">python</span> <span class="pre">-m</span> <span class="pre">idlelib</span></code>) and see if a message appears.</p>
+starting it from a console or terminal (<code class="docutils literal notranslate"><span class="pre">python</span> <span class="pre">-m</span> <span class="pre">idlelib</span></code>) and see if
+this results in an error message.</p>
</div>
<div class="section" id="running-user-code">
<h3>Running user code<a class="headerlink" href="#running-user-code" title="Permalink to this headline">¶</a></h3>
<li>
- <a href="../index.html">3.9.0a1 Documentation</a> »
+ <a href="../index.html">3.9.0a4 Documentation</a> »
</li>
<li class="nav-item nav-item-1"><a href="index.html" >The Python Standard Library</a> »</li>
</ul>
</div>
<div class="footer">
- © <a href="../copyright.html">Copyright</a> 2001-2019, Python Software Foundation.
+ © <a href="../copyright.html">Copyright</a> 2001-2020, Python Software Foundation.
<br />
The Python Software Foundation is a non-profit corporation.
<br />
<br />
- Last updated on Nov 24, 2019.
+ Last updated on Mar 07, 2020.
<a href="https://docs.python.org/3/bugs.html">Found a bug</a>?
<br />
acp = self.autocomplete
small, large = acp.fetch_completions(
'', ac.ATTRS)
- if __main__.__file__ != ac.__file__:
+ if hasattr(__main__, '__file__') and __main__.__file__ != ac.__file__:
self.assertNotIn('AutoComplete', small) # See issue 36405.
# Test attributes
with self.subTest(meth=meth, mtip=mtip):
self.assertEqual(get_spec(meth), mtip)
- def test_attribute_exception(self):
+ def test_buggy_getattr_class(self):
class NoCall:
- def __getattr__(self, name):
- raise BaseException
+ def __getattr__(self, name): # Not invoked for class attribute.
+ raise IndexError # Bug.
class CallA(NoCall):
- def __call__(oui, a, b, c):
+ def __call__(self, ci): # Bug does not matter.
pass
class CallB(NoCall):
- def __call__(self, ci):
+ def __call__(oui, a, b, c): # Non-standard 'self'.
pass
for meth, mtip in ((NoCall, default_tip), (CallA, default_tip),
- (NoCall(), ''), (CallA(), '(a, b, c)'),
- (CallB(), '(ci)')):
+ (NoCall(), ''), (CallA(), '(ci)'),
+ (CallB(), '(a, b, c)')):
+ with self.subTest(meth=meth, mtip=mtip):
+ self.assertEqual(get_spec(meth), mtip)
+
+ def test_metaclass_class(self): # Failure case for issue 38689.
+ class Type(type): # Type() requires 3 type args, returns class.
+ __class__ = property({}.__getitem__, {}.__setitem__)
+ class Object(metaclass=Type):
+ __slots__ = '__class__'
+ for meth, mtip in ((Type, default_tip), (Object, default_tip),
+ (Object(), '')):
with self.subTest(meth=meth, mtip=mtip):
self.assertEqual(get_spec(meth), mtip)
from collections import namedtuple
from test.support import requires
from tkinter import Tk
+from idlelib.idle_test.mock_idle import Func
Editor = editor.EditorWindow
)
+def insert(text, string):
+ text.delete('1.0', 'end')
+ text.insert('end', string)
+ text.update() # Force update for colorizer to finish.
+
+
class IndentAndNewlineTest(unittest.TestCase):
@classmethod
cls.root.destroy()
del cls.root
- def insert(self, text):
- t = self.window.text
- t.delete('1.0', 'end')
- t.insert('end', text)
- # Force update for colorizer to finish.
- t.update()
-
def test_indent_and_newline_event(self):
eq = self.assertEqual
w = self.window
w.prompt_last_line = ''
for test in tests:
with self.subTest(label=test.label):
- self.insert(test.text)
+ insert(text, test.text)
text.mark_set('insert', test.mark)
nl(event=None)
eq(get('1.0', 'end'), test.expected)
# Selected text.
- self.insert(' def f1(self, a, b):\n return a + b')
+ insert(text, ' def f1(self, a, b):\n return a + b')
text.tag_add('sel', '1.17', '1.end')
nl(None)
# Deletes selected text before adding new line.
# Preserves the whitespace in shell prompt.
w.prompt_last_line = '>>> '
- self.insert('>>> \t\ta =')
+ insert(text, '>>> \t\ta =')
text.mark_set('insert', '1.5')
nl(None)
eq(get('1.0', 'end'), '>>> \na =\n')
+class RMenuTest(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ requires('gui')
+ cls.root = Tk()
+ cls.root.withdraw()
+ cls.window = Editor(root=cls.root)
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.window._close()
+ del cls.window
+ cls.root.update_idletasks()
+ for id in cls.root.tk.call('after', 'info'):
+ cls.root.after_cancel(id)
+ cls.root.destroy()
+ del cls.root
+
+ class DummyRMenu:
+ def tk_popup(x, y): pass
+
+ def test_rclick(self):
+ pass
+
+
if __name__ == '__main__':
unittest.main(verbosity=2)
self.assertEqual(dialog.entry_error['text'], '')
+class GotoTest(unittest.TestCase):
+ "Test Goto subclass of Query."
+
+ class Dummy_ModuleName:
+ entry_ok = query.Goto.entry_ok # Function being tested.
+ def __init__(self, dummy_entry):
+ self.entry = Var(value=dummy_entry)
+ self.entry_error = {'text': ''}
+ def showerror(self, message):
+ self.entry_error['text'] = message
+
+ def test_bogus_goto(self):
+ dialog = self.Dummy_ModuleName('a')
+ self.assertEqual(dialog.entry_ok(), None)
+ self.assertIn('not a base 10 integer', dialog.entry_error['text'])
+
+ def test_bad_goto(self):
+ dialog = self.Dummy_ModuleName('0')
+ self.assertEqual(dialog.entry_ok(), None)
+ self.assertIn('not a positive integer', dialog.entry_error['text'])
+
+ def test_good_goto(self):
+ dialog = self.Dummy_ModuleName('1')
+ self.assertEqual(dialog.entry_ok(), 1)
+ self.assertEqual(dialog.entry_error['text'], '')
+
+
# 3 HelpSource test classes each test one method.
class HelpsourceBrowsefileTest(unittest.TestCase):
root.destroy()
+class GotoGuiTest(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+ requires('gui')
+
+ def test_click_module_name(self):
+ root = Tk()
+ root.withdraw()
+ dialog = query.Goto(root, 'T', 't', _utest=True)
+ dialog.entry.insert(0, '22')
+ dialog.button_ok.invoke()
+ self.assertEqual(dialog.result, 22)
+ root.destroy()
+
+
class HelpsourceGuiTest(unittest.TestCase):
@classmethod
"""Test sidebar, coverage 93%"""
import idlelib.sidebar
-from sys import platform
from itertools import chain
import unittest
import unittest.mock
"Test squeezer, coverage 95%"
-from collections import namedtuple
from textwrap import dedent
from tkinter import Text, Tk
import unittest
if sys.platform == 'win32':
try:
import ctypes
- PROCESS_SYSTEM_DPI_AWARE = 1
+ PROCESS_SYSTEM_DPI_AWARE = 1 # Int required.
ctypes.OleDLL('shcore').SetProcessDpiAwareness(PROCESS_SYSTEM_DPI_AWARE)
except (ImportError, AttributeError, OSError):
pass
def runsource(self, source):
"Extend base class method: Stuff the source in the line cache first"
filename = self.stuffsource(source)
- self.more = 0
# at the moment, InteractiveInterpreter expects str
assert isinstance(source, str)
# InteractiveInterpreter.runsource() calls its runcode() method,
def beginexecuting(self):
"Helper for ModifiedInterpreter"
self.resetoutput()
- self.executing = 1
+ self.executing = True
def endexecuting(self):
"Helper for ModifiedInterpreter"
- self.executing = 0
- self.canceled = 0
+ self.executing = False
+ self.canceled = False
self.showprompt()
def close(self):
def readline(self):
save = self.reading
try:
- self.reading = 1
+ self.reading = True
self.top.mainloop() # nested mainloop()
finally:
self.reading = save
line = "\n"
self.resetoutput()
if self.canceled:
- self.canceled = 0
+ self.canceled = False
if not use_subprocess:
raise KeyboardInterrupt
if self.endoffile:
- self.endoffile = 0
+ self.endoffile = False
line = ""
return line
self.interp.write("KeyboardInterrupt\n")
self.showprompt()
return "break"
- self.endoffile = 0
- self.canceled = 1
+ self.endoffile = False
+ self.canceled = True
if (self.executing and self.interp.rpcclt):
if self.interp.getdebugger():
self.interp.restart_subprocess()
self.resetoutput()
self.close()
else:
- self.canceled = 0
- self.endoffile = 1
+ self.canceled = False
+ self.endoffile = True
self.top.quit()
return "break"
raise ###pass # ### 11Aug07 KBK if we are expecting exceptions
# let's find out what they are and be specific.
if self.canceled:
- self.canceled = 0
+ self.canceled = False
if not use_subprocess:
raise KeyboardInterrupt
return count
iconfile = os.path.join(icondir, 'idle.ico')
root.wm_iconbitmap(default=iconfile)
elif not macosx.isAquaTk():
- ext = '.png' if TkVersion >= 8.6 else '.gif'
+ if TkVersion >= 8.6:
+ ext = '.png'
+ sizes = (16, 32, 48, 256)
+ else:
+ ext = '.gif'
+ sizes = (16, 32, 48)
iconfiles = [os.path.join(icondir, 'idle_%d%s' % (size, ext))
- for size in (16, 32, 48)]
+ for size in sizes]
icons = [PhotoImage(master=root, file=iconfile)
for iconfile in iconfiles]
root.wm_iconphoto(True, *icons)
exists=True, root=self.parent)
self.entry_error = Label(frame, text=' ', foreground='red',
font=self.error_font)
+        # Display or clear the error message by assigning to ['text'].
entrylabel.grid(column=0, row=0, columnspan=3, padx=5, sticky=W)
self.entry.grid(column=0, row=1, columnspan=3, padx=5, sticky=W+E,
pady=[10,0])
def entry_ok(self): # Example: usually replace.
"Return non-blank entry or None."
- self.entry_error['text'] = ''
entry = self.entry.get().strip()
if not entry:
self.showerror('blank line.')
Otherwise leave dialog open for user to correct entry or cancel.
'''
+ self.entry_error['text'] = ''
entry = self.entry_ok()
if entry is not None:
self.result = entry
def entry_ok(self):
"Return sensible ConfigParser section name or None."
- self.entry_error['text'] = ''
name = self.entry.get().strip()
if not name:
self.showerror('no name specified.')
def entry_ok(self):
"Return entered module name as file path or None."
- self.entry_error['text'] = ''
name = self.entry.get().strip()
if not name:
self.showerror('no name specified.')
return file_path
+class Goto(Query):
+ "Get a positive line number for editor Go To Line."
+ # Used in editor.EditorWindow.goto_line_event.
+
+ def entry_ok(self):
+ try:
+ lineno = int(self.entry.get())
+ except ValueError:
+ self.showerror('not a base 10 integer.')
+ return None
+ if lineno <= 0:
+ self.showerror('not a positive integer.')
+ return None
+ return lineno
+
+
class HelpSource(Query):
"Get menu name and help source for Help menu."
# Used in ConfigDialog.HelpListItemAdd/Edit, (941/9)
def entry_ok(self):
"Return apparently valid (name, path) or None"
- self.entry_error['text'] = ''
self.path_error['text'] = ''
name = self.item_ok()
path = self.path_ok()
def entry_ok(self):
"Return apparently valid (cli_args, restart) or None"
- self.entry_error['text'] = ''
cli_args = self.cli_args_ok()
restart = self.restartvar.get()
return None if cli_args is None else (cli_args, restart)
from tkinter.ttk import Frame, Scrollbar, Button
from tkinter.messagebox import showerror
-from functools import update_wrapper
from idlelib.colorizer import color_config
if __name__ == '__main__':
- main()
+ try:
+ main()
+ except BrokenPipeError as exc:
+ sys.exit(exc.errno)
compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
async_stmt: ASYNC (funcdef | with_stmt | for_stmt)
-if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
-while_stmt: 'while' test ':' suite ['else' ':' suite]
+if_stmt: 'if' namedexpr_test ':' suite ('elif' namedexpr_test ':' suite)* ['else' ':' suite]
+while_stmt: 'while' namedexpr_test ':' suite ['else' ':' suite]
for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
try_stmt: ('try' ':' suite
((except_clause ':' suite)+
old_test: or_test | old_lambdef
old_lambdef: 'lambda' [varargslist] ':' old_test
+namedexpr_test: test [':=' test]
test: or_test ['if' or_test 'else' test] | lambdef
or_test: and_test ('or' and_test)*
and_test: not_test ('and' not_test)*
'{' [dictsetmaker] '}' |
'`' testlist1 '`' |
NAME | NUMBER | STRING+ | '.' '.' '.')
-listmaker: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
-testlist_gexp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
+listmaker: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] )
+testlist_gexp: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] )
lambdef: 'lambda' [varargslist] ':' test
trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
subscriptlist: subscript (',' subscript)* [',']
# multiple (test comp_for) arguments are blocked; keyword unpackings
# that precede iterable unpackings are blocked; etc.
argument: ( test [comp_for] |
+ test ':=' test |
test '=' test |
'**' test |
'*' test )
// DOUBLESLASH
//= DOUBLESLASHEQUAL
-> RARROW
+:= COLONEQUAL
"""
opmap = {}
AWAIT = 56
ASYNC = 57
ERRORTOKEN = 58
-N_TOKENS = 59
+COLONEQUAL = 59
+N_TOKENS = 60
NT_OFFSET = 256
#--end constants--
r"~")
Bracket = '[][(){}]'
-Special = group(r'\r?\n', r'[:;.,`@]')
+Special = group(r'\r?\n', r':=', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(Number, Funny, String, Name)
self.validate(s)
+class TestNamedAssignments(GrammarTest):
+
+ def test_named_assignment_if(self):
+ driver.parse_string("if f := x(): pass\n")
+
+ def test_named_assignment_while(self):
+ driver.parse_string("while f := x(): pass\n")
+
+ def test_named_assignment_generator(self):
+ driver.parse_string("any((lastNum := num) == 1 for num in [1, 2, 3])\n")
+
+ def test_named_assignment_listcomp(self):
+ driver.parse_string("[(lastNum := num) == 1 for num in [1, 2, 3]]\n")
+
+
def diff_texts(a, b, filename):
a = a.splitlines()
b = b.splitlines()
try:
stat = os.stat(fullname)
except OSError:
- del cache[filename]
+ cache.pop(filename, None)
continue
if size != stat.st_size or mtime != stat.st_mtime:
- del cache[filename]
+ cache.pop(filename, None)
def updatecache(filename, module_globals=None):
if filename in cache:
if len(cache[filename]) != 1:
- del cache[filename]
+ cache.pop(filename, None)
if not filename or (filename.startswith('<') and filename.endswith('>')):
return []
return 'AF_INET'
elif type(address) is str and address.startswith('\\\\'):
return 'AF_PIPE'
- elif type(address) is str:
+ elif type(address) is str or util.is_abstract_socket_namespace(address):
return 'AF_UNIX'
else:
raise ValueError('address type of %r unrecognized' % address)
self._family = family
self._last_accepted = None
- if family == 'AF_UNIX':
+ if family == 'AF_UNIX' and not util.is_abstract_socket_namespace(address):
+ # Linux abstract socket namespaces do not need to be explicitly unlinked
self._unlink = util.Finalize(
self, os.unlink, args=(address,), exitpriority=0
)
self._lock = threading.Lock()
self._preload_modules = ['__main__']
+ def _stop(self):
+ # Method used by unit tests to stop the server
+ with self._lock:
+ self._stop_unlocked()
+
+ def _stop_unlocked(self):
+ if self._forkserver_pid is None:
+ return
+
+        # closing the "alive" file descriptor asks the server to stop
+ os.close(self._forkserver_alive_fd)
+ self._forkserver_alive_fd = None
+
+ os.waitpid(self._forkserver_pid, 0)
+ self._forkserver_pid = None
+
+ os.unlink(self._forkserver_address)
+ self._forkserver_address = None
+
def set_forkserver_preload(self, modules_names):
'''Set list of module names to try to load in forkserver process.'''
if not all(type(mod) is str for mod in self._preload_modules):
with socket.socket(socket.AF_UNIX) as listener:
address = connection.arbitrary_address('AF_UNIX')
listener.bind(address)
- os.chmod(address, 0o600)
+ if not util.is_abstract_socket_namespace(address):
+ os.chmod(address, 0o600)
listener.listen()
# all client processes own the write end of the "alive" pipe;
class Token(object):
'''
- Type to uniquely indentify a shared object
+ Type to uniquely identify a shared object
'''
__slots__ = ('typeid', 'address', 'id')
def _callmethod(self, methodname, args=(), kwds={}):
'''
- Try to call a method of the referrent and return a copy of the result
+ Try to call a method of the referent and return a copy of the result
'''
try:
conn = self._tls.connection
_log_to_stderr = True
return _logger
+
+# Abstract socket support
+
+def _platform_supports_abstract_sockets():
+ if sys.platform == "linux":
+ return True
+ if hasattr(sys, 'getandroidapilevel'):
+ return True
+ return False
+
+
+def is_abstract_socket_namespace(address):
+ if not address:
+ return False
+ if isinstance(address, bytes):
+ return address[0] == 0
+ elif isinstance(address, str):
+ return address[0] == "\0"
+    raise TypeError(f'address type of {address!r} unrecognized')
+
+
+abstract_sockets_supported = _platform_supports_abstract_sockets()
+
#
# Function returning a temp directory which will be removed on exit
#
finally:
os.close(errpipe_read)
os.close(errpipe_write)
+
+
+def _cleanup_tests():
+ """Cleanup multiprocessing resources when multiprocessing tests
+ completed."""
+
+ from test import support
+
+ # cleanup multiprocessing
+ process._cleanup()
+
+ # Stop the ForkServer process if it's running
+ from multiprocessing import forkserver
+ forkserver._forkserver._stop()
+
+ # bpo-37421: Explicitly call _run_finalizers() to remove immediately
+ # temporary directories created by multiprocessing.util.get_temp_dir().
+ _run_finalizers()
+ support.gc_collect()
+
+ support.reap_children()
def _select_from(self, parent_path, is_dir, exists, scandir):
try:
- entries = list(scandir(parent_path))
+ with scandir(parent_path) as scandir_it:
+ entries = list(scandir_it)
for entry in entries:
- entry_is_dir = False
- try:
- entry_is_dir = entry.is_dir()
- except OSError as e:
- if not _ignore_error(e):
- raise
- if not self.dironly or entry_is_dir:
- name = entry.name
- if self.match(name):
- path = parent_path._make_child_relpath(name)
- for p in self.successor._select_from(path, is_dir, exists, scandir):
- yield p
+ if self.dironly:
+ try:
+ # "entry.is_dir()" can raise PermissionError
+ # in some cases (see bpo-38894), which is not
+ # among the errors ignored by _ignore_error()
+ if not entry.is_dir():
+ continue
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ continue
+ name = entry.name
+ if self.match(name):
+ path = parent_path._make_child_relpath(name)
+ for p in self.successor._select_from(path, is_dir, exists, scandir):
+ yield p
except PermissionError:
return
-
class _RecursiveWildcardSelector(_Selector):
def __init__(self, pat, child_parts, flavour):
def _iterate_directories(self, parent_path, is_dir, scandir):
yield parent_path
try:
- entries = list(scandir(parent_path))
+ with scandir(parent_path) as scandir_it:
+ entries = list(scandir_it)
for entry in entries:
entry_is_dir = False
try:
else:
try:
cvkey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion'
- with winreg.OpenKeyEx(HKEY_LOCAL_MACHINE, cvkey) as key:
- ptype = QueryValueEx(key, 'CurrentType')[0]
- except:
+ with winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE, cvkey) as key:
+ ptype = winreg.QueryValueEx(key, 'CurrentType')[0]
+ except OSError:
pass
return release, version, csd, ptype
# -*- coding: utf-8 -*-
-# Autogenerated by Sphinx on Tue Mar 10 02:06:42 2020
+# Autogenerated by Sphinx on Wed Jun 17 04:38:18 2020
topics = {'assert': 'The "assert" statement\n'
'**********************\n'
'\n'
'assigned,\n'
' from left to right, to the corresponding targets.\n'
'\n'
- ' * If the target list contains one target prefixed with an\n'
- ' asterisk, called a “starred” target: The object must be '
- 'an\n'
- ' iterable with at least as many items as there are targets '
- 'in the\n'
- ' target list, minus one. The first items of the iterable '
- 'are\n'
- ' assigned, from left to right, to the targets before the '
+ ' * If the target list contains one target prefixed with an '
+ 'asterisk,\n'
+ ' called a “starred” target: The object must be an iterable '
+ 'with at\n'
+ ' least as many items as there are targets in the target '
+ 'list, minus\n'
+ ' one. The first items of the iterable are assigned, from '
+ 'left to\n'
+ ' right, to the targets before the starred target. The '
+ 'final items\n'
+ ' of the iterable are assigned to the targets after the '
'starred\n'
- ' target. The final items of the iterable are assigned to '
- 'the\n'
- ' targets after the starred target. A list of the remaining '
- 'items\n'
- ' in the iterable is then assigned to the starred target '
- '(the list\n'
- ' can be empty).\n'
+ ' target. A list of the remaining items in the iterable is '
+ 'then\n'
+ ' assigned to the starred target (the list can be empty).\n'
'\n'
' * Else: The object must be an iterable with the same number '
- 'of\n'
- ' items as there are targets in the target list, and the '
- 'items are\n'
+ 'of items\n'
+ ' as there are targets in the target list, and the items '
+ 'are\n'
' assigned, from left to right, to the corresponding '
'targets.\n'
'\n'
'in the\n'
' current local namespace.\n'
'\n'
- ' * Otherwise: the name is bound to the object in the global\n'
- ' namespace or the outer namespace determined by '
- '"nonlocal",\n'
- ' respectively.\n'
+ ' * Otherwise: the name is bound to the object in the global '
+ 'namespace\n'
+ ' or the outer namespace determined by "nonlocal", '
+ 'respectively.\n'
'\n'
' The name is rebound if it was already bound. This may cause '
'the\n'
'called with\n'
' appropriate arguments.\n'
'\n'
- '* If the target is a slicing: The primary expression in the\n'
- ' reference is evaluated. It should yield a mutable sequence '
- 'object\n'
- ' (such as a list). The assigned object should be a sequence '
- 'object\n'
- ' of the same type. Next, the lower and upper bound '
- 'expressions are\n'
- ' evaluated, insofar they are present; defaults are zero and '
- 'the\n'
- ' sequence’s length. The bounds should evaluate to integers. '
- 'If\n'
- ' either bound is negative, the sequence’s length is added to '
- 'it. The\n'
- ' resulting bounds are clipped to lie between zero and the '
+ '* If the target is a slicing: The primary expression in the '
+ 'reference\n'
+ ' is evaluated. It should yield a mutable sequence object '
+ '(such as a\n'
+ ' list). The assigned object should be a sequence object of '
+ 'the same\n'
+ ' type. Next, the lower and upper bound expressions are '
+ 'evaluated,\n'
+ ' insofar they are present; defaults are zero and the '
'sequence’s\n'
- ' length, inclusive. Finally, the sequence object is asked to '
- 'replace\n'
- ' the slice with the items of the assigned sequence. The '
- 'length of\n'
- ' the slice may be different from the length of the assigned '
+ ' length. The bounds should evaluate to integers. If either '
+ 'bound is\n'
+ ' negative, the sequence’s length is added to it. The '
+ 'resulting\n'
+ ' bounds are clipped to lie between zero and the sequence’s '
+ 'length,\n'
+ ' inclusive. Finally, the sequence object is asked to replace '
+ 'the\n'
+ ' slice with the items of the assigned sequence. The length '
+ 'of the\n'
+ ' slice may be different from the length of the assigned '
'sequence,\n'
' thus changing the length of the target sequence, if the '
'target\n'
'\n'
'-[ Footnotes ]-\n'
'\n'
- '[1] The exception is propagated to the invocation stack unless\n'
- ' there is a "finally" clause which happens to raise another\n'
- ' exception. That new exception causes the old one to be lost.\n'
+ '[1] The exception is propagated to the invocation stack unless '
+ 'there\n'
+ ' is a "finally" clause which happens to raise another '
+ 'exception.\n'
+ ' That new exception causes the old one to be lost.\n'
'\n'
- '[2] A string literal appearing as the first statement in the\n'
- ' function body is transformed into the function’s "__doc__"\n'
- ' attribute and therefore the function’s *docstring*.\n'
+ '[2] A string literal appearing as the first statement in the '
+ 'function\n'
+ ' body is transformed into the function’s "__doc__" attribute '
+ 'and\n'
+ ' therefore the function’s *docstring*.\n'
'\n'
'[3] A string literal appearing as the first statement in the class\n'
' body is transformed into the namespace’s "__doc__" item and\n'
'needs, for\n'
' example, "object.__getattribute__(self, name)".\n'
'\n'
- ' Note: This method may still be bypassed when looking '
- 'up special\n'
- ' methods as the result of implicit invocation via '
- 'language syntax\n'
- ' or built-in functions. See Special method lookup.\n'
+ ' Note:\n'
+ '\n'
+ ' This method may still be bypassed when looking up '
+ 'special methods\n'
+ ' as the result of implicit invocation via language '
+ 'syntax or\n'
+ ' built-in functions. See Special method lookup.\n'
'\n'
'object.__setattr__(self, name, value)\n'
'\n'
'\n'
' sys.modules[__name__].__class__ = VerboseModule\n'
'\n'
- 'Note: Defining module "__getattr__" and setting module '
- '"__class__"\n'
- ' only affect lookups made using the attribute access '
- 'syntax –\n'
- ' directly accessing the module globals (whether by code '
- 'within the\n'
- ' module, or via a reference to the module’s globals '
- 'dictionary) is\n'
- ' unaffected.\n'
+ 'Note:\n'
+ '\n'
+ ' Defining module "__getattr__" and setting module '
+ '"__class__" only\n'
+ ' affect lookups made using the attribute access syntax '
+ '– directly\n'
+ ' accessing the module globals (whether by code within '
+ 'the module, or\n'
+ ' via a reference to the module’s globals dictionary) is '
+ 'unaffected.\n'
'\n'
'Changed in version 3.5: "__class__" module attribute is '
'now writable.\n'
'created. The\n'
' descriptor has been assigned to *name*.\n'
'\n'
- ' Note: "__set_name__()" is only called implicitly as '
- 'part of the\n'
- ' "type" constructor, so it will need to be called '
- 'explicitly with\n'
- ' the appropriate parameters when a descriptor is '
- 'added to a class\n'
+ ' Note:\n'
+ '\n'
+ ' "__set_name__()" is only called implicitly as part '
+ 'of the "type"\n'
+ ' constructor, so it will need to be called '
+ 'explicitly with the\n'
+ ' appropriate parameters when a descriptor is added '
+ 'to a class\n'
' after initial creation:\n'
'\n'
' class A:\n'
'--------------------------\n'
'\n'
'* When inheriting from a class without *__slots__*, the '
- '*__dict__*\n'
- ' and *__weakref__* attribute of the instances will '
- 'always be\n'
- ' accessible.\n'
+ '*__dict__* and\n'
+ ' *__weakref__* attribute of the instances will always '
+ 'be accessible.\n'
'\n'
'* Without a *__dict__* variable, instances cannot be '
'assigned new\n'
' declaration.\n'
'\n'
'* Without a *__weakref__* variable for each instance, '
- 'classes\n'
- ' defining *__slots__* do not support weak references to '
- 'its\n'
- ' instances. If weak reference support is needed, then '
- 'add\n'
- ' "\'__weakref__\'" to the sequence of strings in the '
- '*__slots__*\n'
- ' declaration.\n'
+ 'classes defining\n'
+ ' *__slots__* do not support weak references to its '
+ 'instances. If weak\n'
+ ' reference support is needed, then add '
+ '"\'__weakref__\'" to the\n'
+ ' sequence of strings in the *__slots__* declaration.\n'
'\n'
'* *__slots__* are implemented at the class level by '
'creating\n'
' attribute would overwrite the descriptor assignment.\n'
'\n'
'* The action of a *__slots__* declaration is not limited '
- 'to the\n'
- ' class where it is defined. *__slots__* declared in '
- 'parents are\n'
- ' available in child classes. However, child subclasses '
- 'will get a\n'
- ' *__dict__* and *__weakref__* unless they also define '
- '*__slots__*\n'
- ' (which should only contain names of any *additional* '
- 'slots).\n'
+ 'to the class\n'
+ ' where it is defined. *__slots__* declared in parents '
+ 'are available\n'
+ ' in child classes. However, child subclasses will get a '
+ '*__dict__*\n'
+ ' and *__weakref__* unless they also define *__slots__* '
+ '(which should\n'
+ ' only contain names of any *additional* slots).\n'
'\n'
'* If a class defines a slot also defined in a base '
- 'class, the\n'
- ' instance variable defined by the base class slot is '
- 'inaccessible\n'
- ' (except by retrieving its descriptor directly from the '
- 'base class).\n'
- ' This renders the meaning of the program undefined. In '
- 'the future, a\n'
+ 'class, the instance\n'
+ ' variable defined by the base class slot is '
+ 'inaccessible (except by\n'
+ ' retrieving its descriptor directly from the base '
+ 'class). This\n'
+ ' renders the meaning of the program undefined. In the '
+ 'future, a\n'
' check may be added to prevent this.\n'
'\n'
'* Nonempty *__slots__* does not work for classes derived '
'"bytes" and "tuple".\n'
'\n'
'* Any non-string iterable may be assigned to '
- '*__slots__*. Mappings\n'
- ' may also be used; however, in the future, special '
- 'meaning may be\n'
+ '*__slots__*. Mappings may\n'
+ ' also be used; however, in the future, special meaning '
+ 'may be\n'
' assigned to the values corresponding to each key.\n'
'\n'
'* *__class__* assignment works only if both classes have '
' raise "TypeError".\n'
'\n'
'* If an iterator is used for *__slots__* then a '
- 'descriptor is\n'
- ' created for each of the iterator’s values. However, '
- 'the *__slots__*\n'
+ 'descriptor is created\n'
+ ' for each of the iterator’s values. However, the '
+ '*__slots__*\n'
' attribute will be an empty iterator.\n',
'attribute-references': 'Attribute references\n'
'********************\n'
' value is false. A counter-intuitive implication is that '
'not-a-number\n'
' values are not equal to themselves. For example, if "x =\n'
- ' float(\'NaN\')", "3 < x", "x < 3", "x == x", "x != x" are '
- 'all false.\n'
- ' This behavior is compliant with IEEE 754.\n'
+ ' float(\'NaN\')", "3 < x", "x < 3" and "x == x" are all '
+ 'false, while "x\n'
+ ' != x" is true. This behavior is compliant with IEEE 754.\n'
'\n'
'* Binary sequences (instances of "bytes" or "bytearray") can '
'be\n'
'\n'
' Strings and binary sequences cannot be directly compared.\n'
'\n'
- '* Sequences (instances of "tuple", "list", or "range") can '
- 'be\n'
- ' compared only within each of their types, with the '
- 'restriction that\n'
- ' ranges do not support order comparison. Equality '
- 'comparison across\n'
- ' these types results in inequality, and ordering comparison '
- 'across\n'
- ' these types raises "TypeError".\n'
+ '* Sequences (instances of "tuple", "list", or "range") can be '
+ 'compared\n'
+ ' only within each of their types, with the restriction that '
+ 'ranges do\n'
+ ' not support order comparison. Equality comparison across '
+ 'these\n'
+ ' types results in inequality, and ordering comparison across '
+ 'these\n'
+ ' types raises "TypeError".\n'
'\n'
' Sequences compare lexicographically using comparison of\n'
' corresponding elements, whereby reflexivity of the elements '
' false because the type is not the same).\n'
'\n'
' * Collections that support order comparison are ordered the '
- 'same\n'
- ' as their first unequal elements (for example, "[1,2,x] <= '
+ 'same as\n'
+ ' their first unequal elements (for example, "[1,2,x] <= '
'[1,2,y]"\n'
' has the same value as "x <= y"). If a corresponding '
'element does\n'
'"TypeError".\n'
'\n'
'* Sets (instances of "set" or "frozenset") can be compared '
- 'within\n'
- ' and across their types.\n'
+ 'within and\n'
+ ' across their types.\n'
'\n'
' They define order comparison operators to mean subset and '
'superset\n'
' Comparison of sets enforces reflexivity of its elements.\n'
'\n'
'* Most other built-in types have no comparison methods '
- 'implemented,\n'
- ' so they inherit the default comparison behavior.\n'
+ 'implemented, so\n'
+ ' they inherit the default comparison behavior.\n'
'\n'
'User-defined classes that customize their comparison behavior '
'should\n'
' "total_ordering()" decorator.\n'
'\n'
'* The "hash()" result should be consistent with equality. '
- 'Objects\n'
- ' that are equal should either have the same hash value, or '
- 'be marked\n'
- ' as unhashable.\n'
+ 'Objects that\n'
+ ' are equal should either have the same hash value, or be '
+ 'marked as\n'
+ ' unhashable.\n'
'\n'
'Python does not enforce these consistency rules. In fact, '
'the\n'
':= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, '
'2]".\n'
'\n'
- 'Note: There is a subtlety when the sequence is being modified by '
- 'the\n'
- ' loop (this can only occur for mutable sequences, e.g. lists). '
- 'An\n'
+ 'Note:\n'
+ '\n'
+ ' There is a subtlety when the sequence is being modified by the '
+ 'loop\n'
+ ' (this can only occur for mutable sequences, e.g. lists). An\n'
' internal counter is used to keep track of which item is used '
'next,\n'
' and this is incremented on each iteration. When this counter '
'follows:\n'
'\n'
'1. The context expression (the expression given in the '
- '"with_item")\n'
- ' is evaluated to obtain a context manager.\n'
+ '"with_item") is\n'
+ ' evaluated to obtain a context manager.\n'
'\n'
'2. The context manager’s "__exit__()" is loaded for later use.\n'
'\n'
'3. The context manager’s "__enter__()" method is invoked.\n'
'\n'
- '4. If a target was included in the "with" statement, the return\n'
- ' value from "__enter__()" is assigned to it.\n'
+ '4. If a target was included in the "with" statement, the return '
+ 'value\n'
+ ' from "__enter__()" is assigned to it.\n'
+ '\n'
+ ' Note:\n'
'\n'
- ' Note: The "with" statement guarantees that if the '
- '"__enter__()"\n'
- ' method returns without an error, then "__exit__()" will '
- 'always be\n'
+ ' The "with" statement guarantees that if the "__enter__()" '
+ 'method\n'
+ ' returns without an error, then "__exit__()" will always be\n'
' called. Thus, if an error occurs during the assignment to '
'the\n'
' target list, it will be treated the same as an error '
'\n'
'-[ Footnotes ]-\n'
'\n'
- '[1] The exception is propagated to the invocation stack unless\n'
- ' there is a "finally" clause which happens to raise another\n'
- ' exception. That new exception causes the old one to be '
- 'lost.\n'
+ '[1] The exception is propagated to the invocation stack unless '
+ 'there\n'
+ ' is a "finally" clause which happens to raise another '
+ 'exception.\n'
+ ' That new exception causes the old one to be lost.\n'
'\n'
- '[2] A string literal appearing as the first statement in the\n'
- ' function body is transformed into the function’s "__doc__"\n'
- ' attribute and therefore the function’s *docstring*.\n'
+ '[2] A string literal appearing as the first statement in the '
+ 'function\n'
+ ' body is transformed into the function’s "__doc__" attribute '
+ 'and\n'
+ ' therefore the function’s *docstring*.\n'
'\n'
'[3] A string literal appearing as the first statement in the '
'class\n'
'\n'
'When a description of an arithmetic operator below uses the '
'phrase\n'
- '“the numeric arguments are converted to a common type,” this '
+ '“the numeric arguments are converted to a common type”, this '
'means\n'
'that the operator implementation for built-in types works as '
'follows:\n'
' complex;\n'
'\n'
'* otherwise, if either argument is a floating point number, '
- 'the\n'
- ' other is converted to floating point;\n'
+ 'the other\n'
+ ' is converted to floating point;\n'
'\n'
'* otherwise, both must be integers and no conversion is '
'necessary.\n'
'for\n'
' objects that still exist when the interpreter exits.\n'
'\n'
- ' Note: "del x" doesn’t directly call "x.__del__()" — the '
+ ' Note:\n'
+ '\n'
+ ' "del x" doesn’t directly call "x.__del__()" — the '
'former\n'
' decrements the reference count for "x" by one, and the '
'latter is\n'
'\n'
' See also: Documentation for the "gc" module.\n'
'\n'
- ' Warning: Due to the precarious circumstances under '
- 'which\n'
- ' "__del__()" methods are invoked, exceptions that occur '
- 'during\n'
- ' their execution are ignored, and a warning is printed '
- 'to\n'
- ' "sys.stderr" instead. In particular:\n'
+ ' Warning:\n'
+ '\n'
+ ' Due to the precarious circumstances under which '
+ '"__del__()"\n'
+ ' methods are invoked, exceptions that occur during '
+ 'their execution\n'
+ ' are ignored, and a warning is printed to "sys.stderr" '
+ 'instead.\n'
+ ' In particular:\n'
'\n'
' * "__del__()" can be invoked when arbitrary code is '
'being\n'
' that gets interrupted to execute "__del__()".\n'
'\n'
' * "__del__()" can be executed during interpreter '
- 'shutdown. As\n'
- ' a consequence, the global variables it needs to '
- 'access\n'
- ' (including other modules) may already have been '
- 'deleted or set\n'
- ' to "None". Python guarantees that globals whose name '
- 'begins\n'
- ' with a single underscore are deleted from their '
- 'module before\n'
- ' other globals are deleted; if no other references to '
- 'such\n'
- ' globals exist, this may help in assuring that '
- 'imported modules\n'
- ' are still available at the time when the "__del__()" '
- 'method is\n'
- ' called.\n'
+ 'shutdown. As a\n'
+ ' consequence, the global variables it needs to access '
+ '(including\n'
+ ' other modules) may already have been deleted or set '
+ 'to "None".\n'
+ ' Python guarantees that globals whose name begins '
+ 'with a single\n'
+ ' underscore are deleted from their module before '
+ 'other globals\n'
+ ' are deleted; if no other references to such globals '
+ 'exist, this\n'
+ ' may help in assuring that imported modules are still '
+ 'available\n'
+ ' at the time when the "__del__()" method is called.\n'
'\n'
'object.__repr__(self)\n'
'\n'
' def __hash__(self):\n'
' return hash((self.name, self.nick, self.color))\n'
'\n'
- ' Note: "hash()" truncates the value returned from an '
- 'object’s\n'
- ' custom "__hash__()" method to the size of a '
- '"Py_ssize_t". This\n'
- ' is typically 8 bytes on 64-bit builds and 4 bytes on '
- '32-bit\n'
- ' builds. If an object’s "__hash__()" must '
- 'interoperate on builds\n'
- ' of different bit sizes, be sure to check the width on '
- 'all\n'
- ' supported builds. An easy way to do this is with '
- '"python -c\n'
- ' "import sys; print(sys.hash_info.width)"".\n'
+ ' Note:\n'
+ '\n'
+ ' "hash()" truncates the value returned from an object’s '
+ 'custom\n'
+ ' "__hash__()" method to the size of a "Py_ssize_t". '
+ 'This is\n'
+ ' typically 8 bytes on 64-bit builds and 4 bytes on '
+ '32-bit builds.\n'
+ ' If an object’s "__hash__()" must interoperate on '
+ 'builds of\n'
+ ' different bit sizes, be sure to check the width on all '
+ 'supported\n'
+ ' builds. An easy way to do this is with "python -c '
+ '"import sys;\n'
+ ' print(sys.hash_info.width)"".\n'
'\n'
' If a class does not define an "__eq__()" method it '
'should not\n'
' hashable by an "isinstance(obj, '
'collections.abc.Hashable)" call.\n'
'\n'
- ' Note: By default, the "__hash__()" values of str, bytes '
- 'and\n'
- ' datetime objects are “salted” with an unpredictable '
- 'random value.\n'
+ ' Note:\n'
+ '\n'
+ ' By default, the "__hash__()" values of str, bytes and '
+ 'datetime\n'
+ ' objects are “salted” with an unpredictable random '
+ 'value.\n'
' Although they remain constant within an individual '
'Python\n'
' process, they are not predictable between repeated '
'its\n'
' value.\n'
'\n'
- ' Note: "print()" can also be used, but is not a debugger '
- 'command —\n'
- ' this executes the Python "print()" function.\n'
+ ' Note:\n'
+ '\n'
+ ' "print()" can also be used, but is not a debugger command — '
+ 'this\n'
+ ' executes the Python "print()" function.\n'
'\n'
'pp expression\n'
'\n'
'-[ Footnotes ]-\n'
'\n'
'[1] Whether a frame is considered to originate in a certain '
- 'module\n'
- ' is determined by the "__name__" in the frame globals.\n',
+ 'module is\n'
+ ' determined by the "__name__" in the frame globals.\n',
'del': 'The "del" statement\n'
'*******************\n'
'\n'
'about the\n'
'exceptional condition.\n'
'\n'
- 'Note: Exception messages are not part of the Python API. '
- 'Their\n'
- ' contents may change from one version of Python to the next '
- 'without\n'
- ' warning and should not be relied on by code which will run '
- 'under\n'
- ' multiple versions of the interpreter.\n'
+ 'Note:\n'
+ '\n'
+ ' Exception messages are not part of the Python API. Their '
+ 'contents\n'
+ ' may change from one version of Python to the next without '
+ 'warning\n'
+ ' and should not be relied on by code which will run under '
+ 'multiple\n'
+ ' versions of the interpreter.\n'
'\n'
'See also the description of the "try" statement in section The '
'try\n'
'-[ Footnotes ]-\n'
'\n'
'[1] This limitation occurs because the code that is executed '
- 'by\n'
- ' these operations is not available at the time the module '
- 'is\n'
- ' compiled.\n',
+ 'by these\n'
+ ' operations is not available at the time the module is '
+ 'compiled.\n',
'execmodel': 'Execution model\n'
'***************\n'
'\n'
'about the\n'
'exceptional condition.\n'
'\n'
- 'Note: Exception messages are not part of the Python API. '
- 'Their\n'
- ' contents may change from one version of Python to the next '
- 'without\n'
- ' warning and should not be relied on by code which will run '
- 'under\n'
- ' multiple versions of the interpreter.\n'
+ 'Note:\n'
+ '\n'
+ ' Exception messages are not part of the Python API. Their '
+ 'contents\n'
+ ' may change from one version of Python to the next without '
+ 'warning\n'
+ ' and should not be relied on by code which will run under '
+ 'multiple\n'
+ ' versions of the interpreter.\n'
'\n'
'See also the description of the "try" statement in section The '
'try\n'
'\n'
'-[ Footnotes ]-\n'
'\n'
- '[1] This limitation occurs because the code that is executed '
- 'by\n'
- ' these operations is not available at the time the module '
- 'is\n'
- ' compiled.\n',
+ '[1] This limitation occurs because the code that is executed by '
+ 'these\n'
+ ' operations is not available at the time the module is '
+ 'compiled.\n',
'exprlists': 'Expression lists\n'
'****************\n'
'\n'
'i\n'
':= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, 2]".\n'
'\n'
- 'Note: There is a subtlety when the sequence is being modified by the\n'
- ' loop (this can only occur for mutable sequences, e.g. lists). An\n'
+ 'Note:\n'
+ '\n'
+ ' There is a subtlety when the sequence is being modified by the '
+ 'loop\n'
+ ' (this can only occur for mutable sequences, e.g. lists). An\n'
' internal counter is used to keep track of which item is used next,\n'
' and this is incremented on each iteration. When this counter has\n'
' reached the length of the sequence the loop terminates. This '
'defined.\n'
' See section The import statement.\n'
'\n'
- ' Note: The name "_" is often used in conjunction with\n'
+ ' Note:\n'
+ '\n'
+ ' The name "_" is often used in conjunction with\n'
' internationalization; refer to the documentation for the\n'
' "gettext" module for more information on this '
'convention.\n'
'\n'
'"__*__"\n'
- ' System-defined names. These names are defined by the '
- 'interpreter\n'
- ' and its implementation (including the standard library). '
- 'Current\n'
- ' system names are discussed in the Special method names '
- 'section and\n'
- ' elsewhere. More will likely be defined in future versions '
- 'of\n'
- ' Python. *Any* use of "__*__" names, in any context, that '
- 'does not\n'
- ' follow explicitly documented use, is subject to breakage '
- 'without\n'
- ' warning.\n'
+ ' System-defined names, informally known as “dunder” names. '
+ 'These\n'
+ ' names are defined by the interpreter and its '
+ 'implementation\n'
+ ' (including the standard library). Current system names are\n'
+ ' discussed in the Special method names section and '
+ 'elsewhere. More\n'
+ ' will likely be defined in future versions of Python. *Any* '
+ 'use of\n'
+ ' "__*__" names, in any context, that does not follow '
+ 'explicitly\n'
+ ' documented use, is subject to breakage without warning.\n'
'\n'
'"__*"\n'
' Class-private names. Names in this category, when used '
'\n'
'A non-normative HTML file listing all valid identifier '
'characters for\n'
- 'Unicode 4.1 can be found at https://www.dcl.hpi.uni-\n'
- 'potsdam.de/home/loewis/table-3131.html.\n'
+ 'Unicode 4.1 can be found at\n'
+ 'https://www.unicode.org/Public/13.0.0/ucd/DerivedCoreProperties.txt\n'
'\n'
'\n'
'Keywords\n'
'defined.\n'
' See section The import statement.\n'
'\n'
- ' Note: The name "_" is often used in conjunction with\n'
+ ' Note:\n'
+ '\n'
+ ' The name "_" is often used in conjunction with\n'
' internationalization; refer to the documentation for '
'the\n'
' "gettext" module for more information on this '
'convention.\n'
'\n'
'"__*__"\n'
- ' System-defined names. These names are defined by the '
- 'interpreter\n'
- ' and its implementation (including the standard library). '
- 'Current\n'
- ' system names are discussed in the Special method names '
- 'section and\n'
- ' elsewhere. More will likely be defined in future versions '
- 'of\n'
- ' Python. *Any* use of "__*__" names, in any context, that '
- 'does not\n'
- ' follow explicitly documented use, is subject to breakage '
- 'without\n'
- ' warning.\n'
+ ' System-defined names, informally known as “dunder” names. '
+ 'These\n'
+ ' names are defined by the interpreter and its '
+ 'implementation\n'
+ ' (including the standard library). Current system names '
+ 'are\n'
+ ' discussed in the Special method names section and '
+ 'elsewhere. More\n'
+ ' will likely be defined in future versions of Python. '
+ '*Any* use of\n'
+ ' "__*__" names, in any context, that does not follow '
+ 'explicitly\n'
+ ' documented use, is subject to breakage without warning.\n'
'\n'
'"__*"\n'
' Class-private names. Names in this category, when used '
'\n'
'1. find a module, loading and initializing it if necessary\n'
'\n'
- '2. define a name or names in the local namespace for the scope\n'
- ' where the "import" statement occurs.\n'
+ '2. define a name or names in the local namespace for the scope '
+ 'where\n'
+ ' the "import" statement occurs.\n'
'\n'
'When the statement contains multiple clauses (separated by commas) '
'the\n'
'made\n'
'available in the local namespace in one of three ways:\n'
'\n'
- '* If the module name is followed by "as", then the name following\n'
- ' "as" is bound directly to the imported module.\n'
+ '* If the module name is followed by "as", then the name following '
+ '"as"\n'
+ ' is bound directly to the imported module.\n'
'\n'
'* If no other name is specified, and the module being imported is '
'a\n'
'"__rpow__()" (the\n'
' coercion rules would become too complicated).\n'
'\n'
- ' Note: If the right operand’s type is a subclass of the '
- 'left\n'
- ' operand’s type and that subclass provides the '
- 'reflected method\n'
- ' for the operation, this method will be called before '
- 'the left\n'
- ' operand’s non-reflected method. This behavior allows '
- 'subclasses\n'
- ' to override their ancestors’ operations.\n'
+ ' Note:\n'
+ '\n'
+ ' If the right operand’s type is a subclass of the left '
+ 'operand’s\n'
+ ' type and that subclass provides the reflected method '
+ 'for the\n'
+ ' operation, this method will be called before the left '
+ 'operand’s\n'
+ ' non-reflected method. This behavior allows subclasses '
+ 'to\n'
+ ' override their ancestors’ operations.\n'
'\n'
'object.__iadd__(self, other)\n'
'object.__isub__(self, other)\n'
'numeric\n'
' object is an integer type. Must return an integer.\n'
'\n'
- ' Note: In order to have a coherent integer type class, '
- 'when\n'
+ ' Note:\n'
+ '\n'
+ ' In order to have a coherent integer type class, when\n'
' "__index__()" is defined "__int__()" should also be '
'defined, and\n'
' both should return the same value.\n'
'program is represented by objects or by relations between '
'objects. (In\n'
'a sense, and in conformance to Von Neumann’s model of a “stored\n'
- 'program computer,” code is also represented by objects.)\n'
+ 'program computer”, code is also represented by objects.)\n'
'\n'
'Every object has an identity, a type and a value. An object’s\n'
'*identity* never changes once it has been created; you may think '
'-[ Footnotes ]-\n'
'\n'
'[1] While "abs(x%y) < abs(y)" is true mathematically, '
- 'for floats\n'
- ' it may not be true numerically due to roundoff. For '
+ 'for floats it\n'
+ ' may not be true numerically due to roundoff. For '
'example, and\n'
' assuming a platform on which a Python float is an '
'IEEE 754 double-\n'
'"unicodedata.normalize()".\n'
'\n'
'[4] Due to automatic garbage-collection, free lists, and '
- 'the\n'
- ' dynamic nature of descriptors, you may notice '
- 'seemingly unusual\n'
- ' behaviour in certain uses of the "is" operator, like '
- 'those\n'
- ' involving comparisons between instance methods, or '
- 'constants.\n'
- ' Check their documentation for more info.\n'
+ 'the dynamic\n'
+ ' nature of descriptors, you may notice seemingly '
+ 'unusual behaviour\n'
+ ' in certain uses of the "is" operator, like those '
+ 'involving\n'
+ ' comparisons between instance methods, or constants. '
+ 'Check their\n'
+ ' documentation for more info.\n'
'\n'
'[5] The "%" operator is also used for string formatting; '
'the same\n'
' precedence applies.\n'
'\n'
'[6] The power operator "**" binds less tightly than an '
- 'arithmetic\n'
- ' or bitwise unary operator on its right, that is, '
+ 'arithmetic or\n'
+ ' bitwise unary operator on its right, that is, '
'"2**-1" is "0.5".\n',
'pass': 'The "pass" statement\n'
'********************\n'
'\n'
' New in version 3.4.\n'
'\n'
- 'Note: Slicing is done exclusively with the following three '
- 'methods.\n'
- ' A call like\n'
+ 'Note:\n'
+ '\n'
+ ' Slicing is done exclusively with the following three '
+ 'methods. A\n'
+ ' call like\n'
'\n'
' a[1:2] = b\n'
'\n'
'the\n'
' container), "KeyError" should be raised.\n'
'\n'
- ' Note: "for" loops expect that an "IndexError" will be '
+ ' Note:\n'
+ '\n'
+ ' "for" loops expect that an "IndexError" will be '
'raised for\n'
' illegal indexes to allow proper detection of the end '
'of the\n'
'-[ Footnotes ]-\n'
'\n'
'[1] Additional information on these special methods may be '
- 'found\n'
- ' in the Python Reference Manual (Basic customization).\n'
+ 'found in\n'
+ ' the Python Reference Manual (Basic customization).\n'
'\n'
'[2] As a consequence, the list "[1, 2]" is considered equal '
- 'to\n'
- ' "[1.0, 2.0]", and similarly for tuples.\n'
+ 'to "[1.0,\n'
+ ' 2.0]", and similarly for tuples.\n'
'\n'
'[3] They must have since the parser can’t tell the type of '
'the\n'
' operands.\n'
'\n'
'[4] Cased characters are those with general category '
- 'property\n'
- ' being one of “Lu” (Letter, uppercase), “Ll” (Letter, '
- 'lowercase),\n'
- ' or “Lt” (Letter, titlecase).\n'
- '\n'
- '[5] To format only a tuple you should therefore provide a\n'
- ' singleton tuple whose only element is the tuple to be '
- 'formatted.\n',
+ 'property being\n'
+ ' one of “Lu” (Letter, uppercase), “Ll” (Letter, '
+ 'lowercase), or “Lt”\n'
+ ' (Letter, titlecase).\n'
+ '\n'
+ '[5] To format only a tuple you should therefore provide a '
+ 'singleton\n'
+ ' tuple whose only element is the tuple to be formatted.\n',
'specialnames': 'Special method names\n'
'********************\n'
'\n'
'for\n'
' objects that still exist when the interpreter exits.\n'
'\n'
- ' Note: "del x" doesn’t directly call "x.__del__()" — the '
+ ' Note:\n'
+ '\n'
+ ' "del x" doesn’t directly call "x.__del__()" — the '
'former\n'
' decrements the reference count for "x" by one, and the '
'latter is\n'
'\n'
' See also: Documentation for the "gc" module.\n'
'\n'
- ' Warning: Due to the precarious circumstances under which\n'
- ' "__del__()" methods are invoked, exceptions that occur '
- 'during\n'
- ' their execution are ignored, and a warning is printed '
- 'to\n'
- ' "sys.stderr" instead. In particular:\n'
+ ' Warning:\n'
+ '\n'
+ ' Due to the precarious circumstances under which '
+ '"__del__()"\n'
+ ' methods are invoked, exceptions that occur during their '
+ 'execution\n'
+ ' are ignored, and a warning is printed to "sys.stderr" '
+ 'instead.\n'
+ ' In particular:\n'
'\n'
' * "__del__()" can be invoked when arbitrary code is '
'being\n'
' that gets interrupted to execute "__del__()".\n'
'\n'
' * "__del__()" can be executed during interpreter '
- 'shutdown. As\n'
- ' a consequence, the global variables it needs to '
- 'access\n'
- ' (including other modules) may already have been '
- 'deleted or set\n'
- ' to "None". Python guarantees that globals whose name '
- 'begins\n'
- ' with a single underscore are deleted from their '
- 'module before\n'
- ' other globals are deleted; if no other references to '
- 'such\n'
- ' globals exist, this may help in assuring that '
- 'imported modules\n'
- ' are still available at the time when the "__del__()" '
- 'method is\n'
- ' called.\n'
+ 'shutdown. As a\n'
+ ' consequence, the global variables it needs to access '
+ '(including\n'
+ ' other modules) may already have been deleted or set '
+ 'to "None".\n'
+ ' Python guarantees that globals whose name begins with '
+ 'a single\n'
+ ' underscore are deleted from their module before other '
+ 'globals\n'
+ ' are deleted; if no other references to such globals '
+ 'exist, this\n'
+ ' may help in assuring that imported modules are still '
+ 'available\n'
+ ' at the time when the "__del__()" method is called.\n'
'\n'
'object.__repr__(self)\n'
'\n'
' def __hash__(self):\n'
' return hash((self.name, self.nick, self.color))\n'
'\n'
- ' Note: "hash()" truncates the value returned from an '
- 'object’s\n'
- ' custom "__hash__()" method to the size of a '
- '"Py_ssize_t". This\n'
- ' is typically 8 bytes on 64-bit builds and 4 bytes on '
- '32-bit\n'
- ' builds. If an object’s "__hash__()" must interoperate '
- 'on builds\n'
- ' of different bit sizes, be sure to check the width on '
- 'all\n'
- ' supported builds. An easy way to do this is with '
- '"python -c\n'
- ' "import sys; print(sys.hash_info.width)"".\n'
+ ' Note:\n'
+ '\n'
+ ' "hash()" truncates the value returned from an object’s '
+ 'custom\n'
+ ' "__hash__()" method to the size of a "Py_ssize_t". '
+ 'This is\n'
+ ' typically 8 bytes on 64-bit builds and 4 bytes on '
+ '32-bit builds.\n'
+ ' If an object’s "__hash__()" must interoperate on '
+ 'builds of\n'
+ ' different bit sizes, be sure to check the width on all '
+ 'supported\n'
+ ' builds. An easy way to do this is with "python -c '
+ '"import sys;\n'
+ ' print(sys.hash_info.width)"".\n'
'\n'
' If a class does not define an "__eq__()" method it should '
'not\n'
' hashable by an "isinstance(obj, '
'collections.abc.Hashable)" call.\n'
'\n'
- ' Note: By default, the "__hash__()" values of str, bytes '
- 'and\n'
- ' datetime objects are “salted” with an unpredictable '
- 'random value.\n'
+ ' Note:\n'
+ '\n'
+ ' By default, the "__hash__()" values of str, bytes and '
+ 'datetime\n'
+ ' objects are “salted” with an unpredictable random '
+ 'value.\n'
' Although they remain constant within an individual '
'Python\n'
' process, they are not predictable between repeated '
'needs, for\n'
' example, "object.__getattribute__(self, name)".\n'
'\n'
- ' Note: This method may still be bypassed when looking up '
- 'special\n'
- ' methods as the result of implicit invocation via '
- 'language syntax\n'
- ' or built-in functions. See Special method lookup.\n'
+ ' Note:\n'
+ '\n'
+ ' This method may still be bypassed when looking up '
+ 'special methods\n'
+ ' as the result of implicit invocation via language '
+ 'syntax or\n'
+ ' built-in functions. See Special method lookup.\n'
'\n'
'object.__setattr__(self, name, value)\n'
'\n'
'\n'
' sys.modules[__name__].__class__ = VerboseModule\n'
'\n'
- 'Note: Defining module "__getattr__" and setting module '
- '"__class__"\n'
- ' only affect lookups made using the attribute access syntax '
- '–\n'
- ' directly accessing the module globals (whether by code '
- 'within the\n'
- ' module, or via a reference to the module’s globals '
- 'dictionary) is\n'
- ' unaffected.\n'
+ 'Note:\n'
+ '\n'
+ ' Defining module "__getattr__" and setting module '
+ '"__class__" only\n'
+ ' affect lookups made using the attribute access syntax – '
+ 'directly\n'
+ ' accessing the module globals (whether by code within the '
+ 'module, or\n'
+ ' via a reference to the module’s globals dictionary) is '
+ 'unaffected.\n'
'\n'
'Changed in version 3.5: "__class__" module attribute is now '
'writable.\n'
'The\n'
' descriptor has been assigned to *name*.\n'
'\n'
- ' Note: "__set_name__()" is only called implicitly as part '
- 'of the\n'
- ' "type" constructor, so it will need to be called '
- 'explicitly with\n'
- ' the appropriate parameters when a descriptor is added '
- 'to a class\n'
+ ' Note:\n'
+ '\n'
+ ' "__set_name__()" is only called implicitly as part of '
+ 'the "type"\n'
+ ' constructor, so it will need to be called explicitly '
+ 'with the\n'
+ ' appropriate parameters when a descriptor is added to a '
+ 'class\n'
' after initial creation:\n'
'\n'
' class A:\n'
'~~~~~~~~~~~~~~~~~~~~~~~~~~\n'
'\n'
'* When inheriting from a class without *__slots__*, the '
- '*__dict__*\n'
- ' and *__weakref__* attribute of the instances will always '
- 'be\n'
- ' accessible.\n'
+ '*__dict__* and\n'
+ ' *__weakref__* attribute of the instances will always be '
+ 'accessible.\n'
'\n'
'* Without a *__dict__* variable, instances cannot be '
'assigned new\n'
' declaration.\n'
'\n'
'* Without a *__weakref__* variable for each instance, '
- 'classes\n'
- ' defining *__slots__* do not support weak references to '
- 'its\n'
- ' instances. If weak reference support is needed, then add\n'
- ' "\'__weakref__\'" to the sequence of strings in the '
- '*__slots__*\n'
- ' declaration.\n'
+ 'classes defining\n'
+ ' *__slots__* do not support weak references to its '
+ 'instances. If weak\n'
+ ' reference support is needed, then add "\'__weakref__\'" to '
+ 'the\n'
+ ' sequence of strings in the *__slots__* declaration.\n'
'\n'
'* *__slots__* are implemented at the class level by '
'creating\n'
' attribute would overwrite the descriptor assignment.\n'
'\n'
'* The action of a *__slots__* declaration is not limited to '
- 'the\n'
- ' class where it is defined. *__slots__* declared in '
- 'parents are\n'
- ' available in child classes. However, child subclasses will '
- 'get a\n'
- ' *__dict__* and *__weakref__* unless they also define '
- '*__slots__*\n'
- ' (which should only contain names of any *additional* '
- 'slots).\n'
+ 'the class\n'
+ ' where it is defined. *__slots__* declared in parents are '
+ 'available\n'
+ ' in child classes. However, child subclasses will get a '
+ '*__dict__*\n'
+ ' and *__weakref__* unless they also define *__slots__* '
+ '(which should\n'
+ ' only contain names of any *additional* slots).\n'
'\n'
'* If a class defines a slot also defined in a base class, '
- 'the\n'
- ' instance variable defined by the base class slot is '
- 'inaccessible\n'
- ' (except by retrieving its descriptor directly from the '
- 'base class).\n'
- ' This renders the meaning of the program undefined. In the '
+ 'the instance\n'
+ ' variable defined by the base class slot is inaccessible '
+ '(except by\n'
+ ' retrieving its descriptor directly from the base class). '
+ 'This\n'
+ ' renders the meaning of the program undefined. In the '
'future, a\n'
' check may be added to prevent this.\n'
'\n'
'and "tuple".\n'
'\n'
'* Any non-string iterable may be assigned to *__slots__*. '
- 'Mappings\n'
- ' may also be used; however, in the future, special meaning '
- 'may be\n'
+ 'Mappings may\n'
+ ' also be used; however, in the future, special meaning may '
+ 'be\n'
' assigned to the values corresponding to each key.\n'
'\n'
'* *__class__* assignment works only if both classes have the '
' raise "TypeError".\n'
'\n'
'* If an iterator is used for *__slots__* then a descriptor '
- 'is\n'
- ' created for each of the iterator’s values. However, the '
+ 'is created\n'
+ ' for each of the iterator’s values. However, the '
'*__slots__*\n'
' attribute will be an empty iterator.\n'
'\n'
'does nothing,\n'
' but raises an error if it is called with any arguments.\n'
'\n'
- ' Note: The metaclass hint "metaclass" is consumed by the '
- 'rest of\n'
- ' the type machinery, and is never passed to '
+ ' Note:\n'
+ '\n'
+ ' The metaclass hint "metaclass" is consumed by the rest '
+ 'of the\n'
+ ' type machinery, and is never passed to '
'"__init_subclass__"\n'
' implementations. The actual metaclass (rather than the '
'explicit\n'
'tuple may\n'
'be empty, in such case the original base is ignored.\n'
'\n'
- 'See also: **PEP 560** - Core support for typing module and '
- 'generic\n'
- ' types\n'
+ 'See also:\n'
+ '\n'
+ ' **PEP 560** - Core support for typing module and generic '
+ 'types\n'
'\n'
'\n'
'Determining the appropriate metaclass\n'
'type hints,\n'
'other usage is discouraged.\n'
'\n'
- 'See also: **PEP 560** - Core support for typing module and '
- 'generic\n'
- ' types\n'
+ 'See also:\n'
+ '\n'
+ ' **PEP 560** - Core support for typing module and generic '
+ 'types\n'
'\n'
'\n'
'Emulating callable objects\n'
'\n'
' New in version 3.4.\n'
'\n'
- 'Note: Slicing is done exclusively with the following three '
- 'methods.\n'
- ' A call like\n'
+ 'Note:\n'
+ '\n'
+ ' Slicing is done exclusively with the following three '
+ 'methods. A\n'
+ ' call like\n'
'\n'
' a[1:2] = b\n'
'\n'
'the\n'
' container), "KeyError" should be raised.\n'
'\n'
- ' Note: "for" loops expect that an "IndexError" will be '
- 'raised for\n'
+ ' Note:\n'
+ '\n'
+ ' "for" loops expect that an "IndexError" will be raised '
+ 'for\n'
' illegal indexes to allow proper detection of the end of '
'the\n'
' sequence.\n'
'"__rpow__()" (the\n'
' coercion rules would become too complicated).\n'
'\n'
- ' Note: If the right operand’s type is a subclass of the '
- 'left\n'
- ' operand’s type and that subclass provides the reflected '
- 'method\n'
- ' for the operation, this method will be called before '
- 'the left\n'
- ' operand’s non-reflected method. This behavior allows '
- 'subclasses\n'
- ' to override their ancestors’ operations.\n'
+ ' Note:\n'
+ '\n'
+ ' If the right operand’s type is a subclass of the left '
+ 'operand’s\n'
+ ' type and that subclass provides the reflected method '
+ 'for the\n'
+ ' operation, this method will be called before the left '
+ 'operand’s\n'
+ ' non-reflected method. This behavior allows subclasses '
+ 'to\n'
+ ' override their ancestors’ operations.\n'
'\n'
'object.__iadd__(self, other)\n'
'object.__isub__(self, other)\n'
'numeric\n'
' object is an integer type. Must return an integer.\n'
'\n'
- ' Note: In order to have a coherent integer type class, '
- 'when\n'
+ ' Note:\n'
+ '\n'
+ ' In order to have a coherent integer type class, when\n'
' "__index__()" is defined "__int__()" should also be '
'defined, and\n'
' both should return the same value.\n'
'"-1" if\n'
' *sub* is not found.\n'
'\n'
- ' Note: The "find()" method should be used only if you '
- 'need to know\n'
- ' the position of *sub*. To check if *sub* is a '
- 'substring or not,\n'
- ' use the "in" operator:\n'
+ ' Note:\n'
+ '\n'
+ ' The "find()" method should be used only if you need '
+ 'to know the\n'
+ ' position of *sub*. To check if *sub* is a substring '
+ 'or not, use\n'
+ ' the "in" operator:\n'
'\n'
" >>> 'Py' in 'Python'\n"
' True\n'
' formatting options that can be specified in format '
'strings.\n'
'\n'
- ' Note: When formatting a number ("int", "float", '
- '"complex",\n'
+ ' Note:\n'
+ '\n'
+ ' When formatting a number ("int", "float", "complex",\n'
' "decimal.Decimal" and subclasses) with the "n" type '
'(ex:\n'
' "\'{:n}\'.format(1234)"), the function temporarily '
'\n'
'2. Unlike in Standard C, exactly two hex digits are required.\n'
'\n'
- '3. In a bytes literal, hexadecimal and octal escapes denote the\n'
- ' byte with the given value. In a string literal, these escapes\n'
- ' denote a Unicode character with the given value.\n'
+ '3. In a bytes literal, hexadecimal and octal escapes denote the '
+ 'byte\n'
+ ' with the given value. In a string literal, these escapes '
+ 'denote a\n'
+ ' Unicode character with the given value.\n'
'\n'
'4. Changed in version 3.3: Support for name aliases [1] has been\n'
' added.\n'
'\n'
'5. Exactly four hex digits are required.\n'
'\n'
- '6. Any Unicode character can be encoded this way. Exactly eight\n'
- ' hex digits are required.\n'
+ '6. Any Unicode character can be encoded this way. Exactly eight '
+ 'hex\n'
+ ' digits are required.\n'
'\n'
'Unlike Standard C, all unrecognized escape sequences are left in '
'the\n'
' then they can be used interchangeably to index the same\n'
' dictionary entry.\n'
'\n'
+ ' Dictionaries preserve insertion order, meaning that keys will '
+ 'be\n'
+ ' produced in the same order they were added sequentially over '
+ 'the\n'
+ ' dictionary. Replacing an existing key does not change the '
+ 'order,\n'
+ ' however removing a key and re-inserting it will add it to '
+ 'the\n'
+ ' end instead of keeping its old place.\n'
+ '\n'
' Dictionaries are mutable; they can be created by the "{...}"\n'
' notation (see section Dictionary displays).\n'
'\n'
'"collections"\n'
' module.\n'
'\n'
+ ' Changed in version 3.7: Dictionaries did not preserve '
+ 'insertion\n'
+ ' order in versions of Python before 3.6. In CPython 3.6,\n'
+ ' insertion order was preserved, but it was considered an\n'
+ ' implementation detail at that time rather than a language\n'
+ ' guarantee.\n'
+ '\n'
'Callable types\n'
' These are the types to which the function call operation (see\n'
' section Calls) can be applied:\n'
'detail of\n'
' CPython from 3.6.\n'
'\n'
- 'See also: "types.MappingProxyType" can be used to create a '
- 'read-only\n'
- ' view of a "dict".\n'
+ 'See also:\n'
+ '\n'
+ ' "types.MappingProxyType" can be used to create a read-only '
+ 'view of a\n'
+ ' "dict".\n'
'\n'
'\n'
'Dictionary view objects\n'
'"None", it\n'
' is treated like "1".\n'
'\n'
- '6. Concatenating immutable sequences always results in a new\n'
- ' object. This means that building up a sequence by repeated\n'
- ' concatenation will have a quadratic runtime cost in the '
- 'total\n'
- ' sequence length. To get a linear runtime cost, you must '
- 'switch to\n'
- ' one of the alternatives below:\n'
+ '6. Concatenating immutable sequences always results in a new '
+ 'object.\n'
+ ' This means that building up a sequence by repeated '
+ 'concatenation\n'
+ ' will have a quadratic runtime cost in the total sequence '
+ 'length.\n'
+ ' To get a linear runtime cost, you must switch to one of the\n'
+ ' alternatives below:\n'
'\n'
' * if concatenating "str" objects, you can build a list and '
'use\n'
' * for other types, investigate the relevant class '
'documentation\n'
'\n'
- '7. Some sequence types (such as "range") only support item\n'
- ' sequences that follow specific patterns, and hence don’t '
- 'support\n'
- ' sequence concatenation or repetition.\n'
- '\n'
- '8. "index" raises "ValueError" when *x* is not found in *s*. '
- 'Not\n'
- ' all implementations support passing the additional arguments '
- '*i*\n'
- ' and *j*. These arguments allow efficient searching of '
- 'subsections\n'
- ' of the sequence. Passing the extra arguments is roughly '
- 'equivalent\n'
- ' to using "s[i:j].index(x)", only without copying any data and '
- 'with\n'
- ' the returned index being relative to the start of the '
+ '7. Some sequence types (such as "range") only support item '
+ 'sequences\n'
+ ' that follow specific patterns, and hence don’t support '
'sequence\n'
- ' rather than the start of the slice.\n'
+ ' concatenation or repetition.\n'
+ '\n'
+ '8. "index" raises "ValueError" when *x* is not found in *s*. Not '
+ 'all\n'
+ ' implementations support passing the additional arguments *i* '
+ 'and\n'
+ ' *j*. These arguments allow efficient searching of subsections '
+ 'of\n'
+ ' the sequence. Passing the extra arguments is roughly '
+ 'equivalent to\n'
+ ' using "s[i:j].index(x)", only without copying any data and '
+ 'with the\n'
+ ' returned index being relative to the start of the sequence '
+ 'rather\n'
+ ' than the start of the slice.\n'
'\n'
'\n'
'Immutable Sequence Types\n'
'1. *t* must have the same length as the slice it is replacing.\n'
'\n'
'2. The optional argument *i* defaults to "-1", so that by '
- 'default\n'
- ' the last item is removed and returned.\n'
+ 'default the\n'
+ ' last item is removed and returned.\n'
'\n'
'3. "remove" raises "ValueError" when *x* is not found in *s*.\n'
'\n'
- '4. The "reverse()" method modifies the sequence in place for\n'
- ' economy of space when reversing a large sequence. To remind '
- 'users\n'
- ' that it operates by side effect, it does not return the '
- 'reversed\n'
- ' sequence.\n'
+ '4. The "reverse()" method modifies the sequence in place for '
+ 'economy\n'
+ ' of space when reversing a large sequence. To remind users '
+ 'that it\n'
+ ' operates by side effect, it does not return the reversed '
+ 'sequence.\n'
'\n'
'5. "clear()" and "copy()" are included for consistency with the\n'
' interfaces of mutable containers that don’t support slicing\n'
' * Using a pair of square brackets to denote the empty list: '
'"[]"\n'
'\n'
- ' * Using square brackets, separating items with commas: '
- '"[a]",\n'
- ' "[a, b, c]"\n'
+ ' * Using square brackets, separating items with commas: "[a]", '
+ '"[a,\n'
+ ' b, c]"\n'
'\n'
' * Using a list comprehension: "[x for x in iterable]"\n'
'\n'
'\n'
'See also:\n'
'\n'
- ' * The linspace recipe shows how to implement a lazy version '
- 'of\n'
- ' range suitable for floating point applications.\n',
+ ' * The linspace recipe shows how to implement a lazy version of '
+ 'range\n'
+ ' suitable for floating point applications.\n',
'typesseq-mutable': 'Mutable Sequence Types\n'
'**********************\n'
'\n'
'replacing.\n'
'\n'
'2. The optional argument *i* defaults to "-1", so that '
- 'by default\n'
- ' the last item is removed and returned.\n'
+ 'by default the\n'
+ ' last item is removed and returned.\n'
'\n'
'3. "remove" raises "ValueError" when *x* is not found in '
'*s*.\n'
'\n'
'4. The "reverse()" method modifies the sequence in place '
- 'for\n'
- ' economy of space when reversing a large sequence. To '
- 'remind users\n'
- ' that it operates by side effect, it does not return '
- 'the reversed\n'
- ' sequence.\n'
+ 'for economy\n'
+ ' of space when reversing a large sequence. To remind '
+ 'users that it\n'
+ ' operates by side effect, it does not return the '
+ 'reversed sequence.\n'
'\n'
'5. "clear()" and "copy()" are included for consistency '
'with the\n'
'The execution of the "with" statement with one “item” proceeds as\n'
'follows:\n'
'\n'
- '1. The context expression (the expression given in the "with_item")\n'
- ' is evaluated to obtain a context manager.\n'
+ '1. The context expression (the expression given in the "with_item") '
+ 'is\n'
+ ' evaluated to obtain a context manager.\n'
'\n'
'2. The context manager’s "__exit__()" is loaded for later use.\n'
'\n'
'3. The context manager’s "__enter__()" method is invoked.\n'
'\n'
- '4. If a target was included in the "with" statement, the return\n'
- ' value from "__enter__()" is assigned to it.\n'
+ '4. If a target was included in the "with" statement, the return '
+ 'value\n'
+ ' from "__enter__()" is assigned to it.\n'
+ '\n'
+ ' Note:\n'
'\n'
- ' Note: The "with" statement guarantees that if the "__enter__()"\n'
- ' method returns without an error, then "__exit__()" will always '
- 'be\n'
+ ' The "with" statement guarantees that if the "__enter__()" '
+ 'method\n'
+ ' returns without an error, then "__exit__()" will always be\n'
' called. Thus, if an error occurs during the assignment to the\n'
' target list, it will be treated the same as an error occurring\n'
' within the suite would be. See step 6 below.\n'
"|" A|B, creates an RE that will match either A or B.
(...) Matches the RE inside the parentheses.
The contents can be retrieved or matched later in the string.
- (?aiLmsux) Set the A, I, L, M, S, U, or X flag for the RE (see below).
+ (?aiLmsux) The letters set the corresponding flags defined below.
(?:...) Non-grouping version of regular parentheses.
(?P<name>...) The substring matched by the group is accessible by name.
(?P=name) Matches the text matched earlier by the group named name.
purge Clear the regular expression cache.
escape Backslash all non-alphanumerics in a string.
-Some of the functions in this module takes flags as optional parameters:
+Each function other than purge and escape can take an optional 'flags' argument
+consisting of one or more of the following module constants, joined by "|".
+A, L, and U are mutually exclusive.
A ASCII For string patterns, make \w, \W, \b, \B, \d, \D
match the corresponding ASCII character categories
(rather than the whole Unicode categories, which is the
def CheckColumnNameWithSpaces(self):
cur = self.con.cursor()
cur.execute('select 1 as "foo bar [datetime]"')
- self.assertEqual(cur.description[0][0], "foo bar")
+ self.assertEqual(cur.description[0][0], "foo bar [datetime]")
cur.execute('select 1 as "foo baz"')
self.assertEqual(cur.description[0][0], "foo baz")
def CheckColName(self):
self.cur.execute("insert into test(x) values (?)", ("xxx",))
- self.cur.execute('select x as "x [bar]" from test')
+ self.cur.execute('select x as "x y [bar]" from test')
val = self.cur.fetchone()[0]
self.assertEqual(val, "<xxx>")
# Check if the stripping of colnames works. Everything after the first
- # whitespace should be stripped.
- self.assertEqual(self.cur.description[0][0], "x")
+        # '[' (and the preceding space) should be stripped.
+ self.assertEqual(self.cur.description[0][0], "x y")
def CheckCaseInConverterName(self):
self.cur.execute("select 'other' as \"x [b1b1]\"")
return bool(self.__scope == GLOBAL_EXPLICIT)
def is_local(self):
- return bool(self.__flags & DEF_BOUND)
+ return bool(self.__scope in (LOCAL, CELL))
def is_annotated(self):
return bool(self.__flags & DEF_ANNOT)
return self._file.readlines(*args)
def seek(self, *args):
- self._file.seek(*args)
+ return self._file.seek(*args)
@property
def softspace(self):
if self.TYPE == 'processes':
self.assertRaises(OSError, l.accept)
+ @unittest.skipUnless(util.abstract_sockets_supported,
+ "test needs abstract socket support")
+ def test_abstract_socket(self):
+ with self.connection.Listener("\0something") as listener:
+ with self.connection.Client(listener.address) as client:
+ with listener.accept() as d:
+ client.send(1729)
+ self.assertEqual(d.recv(), 1729)
+
+ if self.TYPE == 'processes':
+ self.assertRaises(OSError, listener.accept)
+
+
class _TestListenerClient(BaseTestCase):
ALLOWED_TYPES = ('processes', 'threads')
ALLOWED_TYPES = ('processes',)
+ def setUp(self):
+ super().setUp()
+ # Make pristine heap for these tests
+ self.old_heap = multiprocessing.heap.BufferWrapper._heap
+ multiprocessing.heap.BufferWrapper._heap = multiprocessing.heap.Heap()
+
+ def tearDown(self):
+ multiprocessing.heap.BufferWrapper._heap = self.old_heap
+ super().tearDown()
+
def test_heap(self):
iterations = 5000
maxblocks = 50
# Sleep 500 ms to give time to child processes to complete.
if need_sleep:
time.sleep(0.5)
- multiprocessing.process._cleanup()
- test.support.gc_collect()
+
+ multiprocessing.util._cleanup_tests()
remote_globs['setUpModule'] = setUpModule
remote_globs['tearDownModule'] = tearDownModule
test_Py_buffer_converter_impl(PyObject *module, Py_buffer *a, Py_buffer *b,
Py_buffer *c, Py_buffer *d, Py_buffer *e)
/*[clinic end generated code: output=92937215f10bc937 input=6a9da0f56f9525fd]*/
+
+/*[clinic input]
+output push
+output preset buffer
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=5bff3376ee0df0b5]*/
+
+#ifdef CONDITION_A
+/*[clinic input]
+test_preprocessor_guarded_condition_a
+[clinic start generated code]*/
+
+static PyObject *
+test_preprocessor_guarded_condition_a_impl(PyObject *module)
+/*[clinic end generated code: output=ad012af18085add6 input=8edb8706a98cda7e]*/
+#elif CONDITION_B
+/*[clinic input]
+test_preprocessor_guarded_elif_condition_b
+[clinic start generated code]*/
+
+static PyObject *
+test_preprocessor_guarded_elif_condition_b_impl(PyObject *module)
+/*[clinic end generated code: output=615f2dee82b138d1 input=53777cebbf7fee32]*/
+#else
+/*[clinic input]
+test_preprocessor_guarded_else
+[clinic start generated code]*/
+
+static PyObject *
+test_preprocessor_guarded_else_impl(PyObject *module)
+/*[clinic end generated code: output=13af7670aac51b12 input=6657ab31d74c29fc]*/
+#endif
+
+/*[clinic input]
+dump buffer
+output pop
+[clinic start generated code]*/
+
+#if defined(CONDITION_A)
+
+PyDoc_STRVAR(test_preprocessor_guarded_condition_a__doc__,
+"test_preprocessor_guarded_condition_a($module, /)\n"
+"--\n"
+"\n");
+
+#define TEST_PREPROCESSOR_GUARDED_CONDITION_A_METHODDEF \
+ {"test_preprocessor_guarded_condition_a", (PyCFunction)test_preprocessor_guarded_condition_a, METH_NOARGS, test_preprocessor_guarded_condition_a__doc__},
+
+static PyObject *
+test_preprocessor_guarded_condition_a(PyObject *module, PyObject *Py_UNUSED(ignored))
+{
+ return test_preprocessor_guarded_condition_a_impl(module);
+}
+
+#endif /* defined(CONDITION_A) */
+
+#if !defined(CONDITION_A) && (CONDITION_B)
+
+PyDoc_STRVAR(test_preprocessor_guarded_elif_condition_b__doc__,
+"test_preprocessor_guarded_elif_condition_b($module, /)\n"
+"--\n"
+"\n");
+
+#define TEST_PREPROCESSOR_GUARDED_ELIF_CONDITION_B_METHODDEF \
+ {"test_preprocessor_guarded_elif_condition_b", (PyCFunction)test_preprocessor_guarded_elif_condition_b, METH_NOARGS, test_preprocessor_guarded_elif_condition_b__doc__},
+
+static PyObject *
+test_preprocessor_guarded_elif_condition_b(PyObject *module, PyObject *Py_UNUSED(ignored))
+{
+ return test_preprocessor_guarded_elif_condition_b_impl(module);
+}
+
+#endif /* !defined(CONDITION_A) && (CONDITION_B) */
+
+#if !defined(CONDITION_A) && !(CONDITION_B)
+
+PyDoc_STRVAR(test_preprocessor_guarded_else__doc__,
+"test_preprocessor_guarded_else($module, /)\n"
+"--\n"
+"\n");
+
+#define TEST_PREPROCESSOR_GUARDED_ELSE_METHODDEF \
+ {"test_preprocessor_guarded_else", (PyCFunction)test_preprocessor_guarded_else, METH_NOARGS, test_preprocessor_guarded_else__doc__},
+
+static PyObject *
+test_preprocessor_guarded_else(PyObject *module, PyObject *Py_UNUSED(ignored))
+{
+ return test_preprocessor_guarded_else_impl(module);
+}
+
+#endif /* !defined(CONDITION_A) && !(CONDITION_B) */
+
+#ifndef TEST_PREPROCESSOR_GUARDED_CONDITION_A_METHODDEF
+ #define TEST_PREPROCESSOR_GUARDED_CONDITION_A_METHODDEF
+#endif /* !defined(TEST_PREPROCESSOR_GUARDED_CONDITION_A_METHODDEF) */
+
+#ifndef TEST_PREPROCESSOR_GUARDED_ELIF_CONDITION_B_METHODDEF
+ #define TEST_PREPROCESSOR_GUARDED_ELIF_CONDITION_B_METHODDEF
+#endif /* !defined(TEST_PREPROCESSOR_GUARDED_ELIF_CONDITION_B_METHODDEF) */
+
+#ifndef TEST_PREPROCESSOR_GUARDED_ELSE_METHODDEF
+ #define TEST_PREPROCESSOR_GUARDED_ELSE_METHODDEF
+#endif /* !defined(TEST_PREPROCESSOR_GUARDED_ELSE_METHODDEF) */
+/*[clinic end generated code: output=3804bb18d454038c input=3fc80c9989d2f2e1]*/
-# We import importlib *ASAP* in order to test #15386
-import importlib
-
from test.libregrtest.cmdline import _parse_args, RESOURCE_NAMES, ALL_RESOURCES
from test.libregrtest.main import main
group.add_argument('-m', '--match', metavar='PAT',
dest='match_tests', action='append',
help='match test cases and methods with glob pattern PAT')
+ group.add_argument('-i', '--ignore', metavar='PAT',
+ dest='ignore_tests', action='append',
+ help='ignore test cases and methods with glob pattern PAT')
group.add_argument('--matchfile', metavar='FILENAME',
dest='match_filename',
help='similar to --match but get patterns from a '
'text file, one pattern per line')
+ group.add_argument('--ignorefile', metavar='FILENAME',
+ dest='ignore_filename',
+ help='similar to --matchfile but it receives patterns '
+ 'from text file to ignore')
group.add_argument('-G', '--failfast', action='store_true',
help='fail as soon as a test fails (only with -v or -W)')
group.add_argument('-u', '--use', metavar='RES1,RES2,...',
findleaks=1, use_resources=None, trace=False, coverdir='coverage',
runleaks=False, huntrleaks=False, verbose2=False, print_slow=False,
random_seed=None, use_mp=None, verbose3=False, forever=False,
- header=False, failfast=False, match_tests=None, pgo=False)
+ header=False, failfast=False, match_tests=None, ignore_tests=None,
+ pgo=False)
for k, v in kwargs.items():
if not hasattr(ns, k):
raise TypeError('%r is an invalid keyword argument '
with open(ns.match_filename) as fp:
for line in fp:
ns.match_tests.append(line.strip())
+ if ns.ignore_filename:
+ if ns.ignore_tests is None:
+ ns.ignore_tests = []
+ with open(ns.ignore_filename) as fp:
+ for line in fp:
+ ns.ignore_tests.append(line.strip())
if ns.forever:
# --forever implies --failfast
ns.failfast = True
def list_cases(self):
support.verbose = False
- support.set_match_tests(self.ns.match_tests)
+ support.set_match_tests(self.ns.match_tests, self.ns.ignore_tests)
for test_name in self.selected:
abstest = get_abs_module(self.ns, test_name)
save_modules = sys.modules.keys()
- self.log("Run tests sequentially")
+ msg = "Run tests sequentially"
+ if self.ns.timeout:
+ msg += " (timeout: %s)" % format_duration(self.ns.timeout)
+ self.log(msg)
previous_test = None
for test_index, test_name in enumerate(self.tests, 1):
start_time = time.perf_counter()
try:
- support.set_match_tests(ns.match_tests)
+ support.set_match_tests(ns.match_tests, ns.ignore_tests)
support.junit_xml_list = xml_list = [] if ns.xmlpath else None
if ns.failfast:
support.failfast = True
# since if a test leaves a file open, it cannot be deleted by name (while
# there's nothing we can do about that here either, we can display the
# name of the offending test, which is a real help).
- for name in (support.TESTFN,
- "db_home",
- ):
+ for name in (support.TESTFN,):
if not os.path.exists(name):
continue
import json
import os
import queue
+import signal
import subprocess
import sys
import threading
# Time to wait until a worker completes: should be immediate
JOIN_TIMEOUT = 30.0 # seconds
+USE_PROCESS_GROUP = (hasattr(os, "setsid") and hasattr(os, "killpg"))
+
def must_stop(result, ns):
if result.result == INTERRUPTED:
# Running the child from the same working directory as regrtest's original
# invocation ensures that TEMPDIR for the child is the same when
# sysconfig.is_python_build() is true. See issue 15300.
+ kw = {}
+ if USE_PROCESS_GROUP:
+ kw['start_new_session'] = True
return subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
close_fds=(os.name != 'nt'),
- cwd=support.SAVEDCWD)
+ cwd=support.SAVEDCWD,
+ **kw)
def run_tests_worker(ns, test_name):
def __repr__(self):
info = [f'TestWorkerProcess #{self.worker_id}']
if self.is_alive():
- dt = time.monotonic() - self.start_time
- info.append("running for %s" % format_duration(dt))
+ info.append("running")
else:
info.append('stopped')
test = self.current_test_name
if test:
info.append(f'test={test}')
popen = self._popen
- if popen:
- info.append(f'pid={popen.pid}')
+ if popen is not None:
+ dt = time.monotonic() - self.start_time
+ info.extend((f'pid={self._popen.pid}',
+ f'time={format_duration(dt)}'))
return '<%s>' % ' '.join(info)
def _kill(self):
+ popen = self._popen
+ if popen is None:
+ return
+
if self._killed:
return
self._killed = True
- popen = self._popen
- if popen is None:
- return
+ if USE_PROCESS_GROUP:
+ what = f"{self} process group"
+ else:
+ what = f"{self}"
- print(f"Kill {self}", file=sys.stderr, flush=True)
+ print(f"Kill {what}", file=sys.stderr, flush=True)
try:
- popen.kill()
+ if USE_PROCESS_GROUP:
+ os.killpg(popen.pid, signal.SIGKILL)
+ else:
+ popen.kill()
+ except ProcessLookupError:
+ # popen.kill(): the process completed, the TestWorkerProcess thread
+ # read its exit status, but Popen.send_signal() read the returncode
+ # just before Popen.wait() set returncode.
+ pass
except OSError as exc:
- print_warning(f"Failed to kill {self}: {exc!r}")
+ print_warning(f"Failed to kill {what}: {exc!r}")
def stop(self):
# Method called from a different thread to stop this thread
self.current_test_name = test_name
try:
+ popen = run_test_in_subprocess(test_name, self.ns)
+
self._killed = False
- self._popen = run_test_in_subprocess(test_name, self.ns)
- popen = self._popen
+ self._popen = popen
except:
self.current_test_name = None
raise
self.output = queue.Queue()
self.pending = MultiprocessIterator(self.regrtest.tests)
if self.ns.timeout is not None:
- self.worker_timeout = self.ns.timeout * 1.5
+            # Rely on faulthandler to kill a worker process. This timeout is
+ # when faulthandler fails to kill a worker process. Give a maximum
+ # of 5 minutes to faulthandler to kill the worker.
+ self.worker_timeout = min(self.ns.timeout * 1.5,
+ self.ns.timeout + 5 * 60)
else:
self.worker_timeout = None
self.workers = None
def start_workers(self):
self.workers = [TestWorkerProcess(index, self)
for index in range(1, self.ns.use_mp + 1)]
- self.log("Run tests in parallel using %s child processes"
- % len(self.workers))
+ msg = f"Run tests in parallel using {len(self.workers)} child processes"
+ if self.ns.timeout:
+ msg += (" (timeout: %s, worker timeout: %s)"
+ % (format_duration(self.ns.timeout),
+ format_duration(self.worker_timeout)))
+ self.log(msg)
for worker in self.workers:
worker.start()
if ns.threshold is not None:
gc.set_threshold(ns.threshold)
+ suppress_msvcrt_asserts(ns.verbose and ns.verbose >= 2)
+
+ support.use_resources = ns.use_resources
+
+
+def suppress_msvcrt_asserts(verbose):
try:
import msvcrt
except ImportError:
- pass
- else:
- msvcrt.SetErrorMode(msvcrt.SEM_FAILCRITICALERRORS|
- msvcrt.SEM_NOALIGNMENTFAULTEXCEPT|
- msvcrt.SEM_NOGPFAULTERRORBOX|
- msvcrt.SEM_NOOPENFILEERRORBOX)
- try:
- msvcrt.CrtSetReportMode
- except AttributeError:
- # release build
- pass
+ return
+
+ msvcrt.SetErrorMode(msvcrt.SEM_FAILCRITICALERRORS|
+ msvcrt.SEM_NOALIGNMENTFAULTEXCEPT|
+ msvcrt.SEM_NOGPFAULTERRORBOX|
+ msvcrt.SEM_NOOPENFILEERRORBOX)
+ try:
+ msvcrt.CrtSetReportMode
+ except AttributeError:
+ # release build
+ return
+
+ for m in [msvcrt.CRT_WARN, msvcrt.CRT_ERROR, msvcrt.CRT_ASSERT]:
+ if verbose:
+ msvcrt.CrtSetReportMode(m, msvcrt.CRTDBG_MODE_FILE)
+ msvcrt.CrtSetReportFile(m, msvcrt.CRTDBG_FILE_STDERR)
else:
- for m in [msvcrt.CRT_WARN, msvcrt.CRT_ERROR, msvcrt.CRT_ASSERT]:
- if ns.verbose and ns.verbose >= 2:
- msvcrt.CrtSetReportMode(m, msvcrt.CRTDBG_MODE_FILE)
- msvcrt.CrtSetReportFile(m, msvcrt.CRTDBG_FILE_STDERR)
- else:
- msvcrt.CrtSetReportMode(m, 0)
+ msvcrt.CrtSetReportMode(m, 0)
- support.use_resources = ns.use_resources
def replace_stdout():
stderr=subprocess.PIPE,
universal_newlines=True)
version = proc.communicate()[0]
+ if proc.returncode:
+ # ignore gdb failure: test_gdb will log the error
+ return
except OSError:
return
For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if
the FreeBSD version is less than 7.2.
"""
- def decorator(func):
- @functools.wraps(func)
- def wrapper(*args, **kw):
- if platform.system() == sysname:
- version_txt = platform.release().split('-', 1)[0]
- try:
- version = tuple(map(int, version_txt.split('.')))
- except ValueError:
- pass
- else:
- if version < min_version:
- min_version_txt = '.'.join(map(str, min_version))
- raise unittest.SkipTest(
- "%s version %s or higher required, not %s"
- % (sysname, min_version_txt, version_txt))
- return func(*args, **kw)
- wrapper.min_version = min_version
- return wrapper
- return decorator
+ import platform
+ min_version_txt = '.'.join(map(str, min_version))
+ version_txt = platform.release().split('-', 1)[0]
+ if platform.system() == sysname:
+ try:
+ version = tuple(map(int, version_txt.split('.')))
+ except ValueError:
+ skip = False
+ else:
+ skip = version < min_version
+ else:
+ skip = False
+
+ return unittest.skipIf(
+ skip,
+ f"{sysname} version {min_version_txt} or higher required, not "
+ f"{version_txt}"
+ )
+
def requires_freebsd_version(*min_version):
"""Decorator raising SkipTest if the OS is FreeBSD and the FreeBSD version is
# By default, don't filter tests
_match_test_func = None
-_match_test_patterns = None
+
+_accept_test_patterns = None
+_ignore_test_patterns = None
def match_test(test):
# as a full test identifier.
# Example: 'test.test_os.FileTests.test_access'.
#
- # Reject patterns which contain fnmatch patterns: '*', '?', '[...]'
- # or '[!...]'. For example, reject 'test_access*'.
+ # ignore patterns which contain fnmatch patterns: '*', '?', '[...]'
+ # or '[!...]'. For example, ignore 'test_access*'.
return ('.' in pattern) and (not re.search(r'[?*\[\]]', pattern))
-def set_match_tests(patterns):
- global _match_test_func, _match_test_patterns
+def set_match_tests(accept_patterns=None, ignore_patterns=None):
+ global _match_test_func, _accept_test_patterns, _ignore_test_patterns
- if patterns == _match_test_patterns:
- # No change: no need to recompile patterns.
- return
+ if accept_patterns is None:
+ accept_patterns = ()
+ if ignore_patterns is None:
+ ignore_patterns = ()
+
+ accept_func = ignore_func = None
+
+ if accept_patterns != _accept_test_patterns:
+ accept_patterns, accept_func = _compile_match_function(accept_patterns)
+ if ignore_patterns != _ignore_test_patterns:
+ ignore_patterns, ignore_func = _compile_match_function(ignore_patterns)
+
+ # Create a copy since patterns can be mutable and so modified later
+ _accept_test_patterns = tuple(accept_patterns)
+ _ignore_test_patterns = tuple(ignore_patterns)
+
+ if accept_func is not None or ignore_func is not None:
+ def match_function(test_id):
+ accept = True
+ ignore = False
+ if accept_func:
+ accept = accept_func(test_id)
+ if ignore_func:
+ ignore = ignore_func(test_id)
+ return accept and not ignore
+
+ _match_test_func = match_function
+
+
+def _compile_match_function(patterns):
if not patterns:
func = None
# set_match_tests(None) behaves as set_match_tests(())
func = match_test_regex
- # Create a copy since patterns can be mutable and so modified later
- _match_test_patterns = tuple(patterns)
- _match_test_func = func
-
+ return patterns, func
def run_unittest(*classes):
_osx_support._remove_universal_flags(
config_vars))
+ def test__remove_universal_flags_alternate(self):
+ # bpo-38360: also test the alternate single-argument form of -isysroot
+ config_vars = {
+ 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ',
+ 'LDFLAGS': '-arch ppc -arch i386 -g',
+ 'CPPFLAGS': '-I. -isysroot/Developer/SDKs/MacOSX10.4u.sdk',
+ 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
+ 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
+ '-isysroot/Developer/SDKs/MacOSX10.4u.sdk -g',
+ }
+ expected_vars = {
+ 'CFLAGS': '-fno-strict-aliasing -g -O3 ',
+ 'LDFLAGS': ' -g',
+ 'CPPFLAGS': '-I. ',
+ 'BLDSHARED': 'gcc-4.0 -bundle -g',
+ 'LDSHARED': 'gcc-4.0 -bundle -g',
+ }
+ self.add_expected_saved_initial_values(config_vars, expected_vars)
+
+ self.assertEqual(expected_vars,
+ _osx_support._remove_universal_flags(
+ config_vars))
+
def test__remove_unsupported_archs(self):
config_vars = {
'CC': 'clang',
_osx_support._check_for_unavailable_sdk(
config_vars))
+ def test__check_for_unavailable_sdk_alternate(self):
+ # bpo-38360: also test the alternate single-argument form of -isysroot
+ config_vars = {
+ 'CC': 'clang',
+ 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 '
+ '-isysroot/Developer/SDKs/MacOSX10.1.sdk',
+ 'LDFLAGS': '-arch ppc -arch i386 -g',
+ 'CPPFLAGS': '-I. -isysroot/Developer/SDKs/MacOSX10.1.sdk',
+ 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
+ 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
+ '-isysroot/Developer/SDKs/MacOSX10.1.sdk -g',
+ }
+ expected_vars = {
+ 'CC': 'clang',
+ 'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 '
+ ' ',
+ 'LDFLAGS': '-arch ppc -arch i386 -g',
+ 'CPPFLAGS': '-I. ',
+ 'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
+ 'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
+ ' -g',
+ }
+ self.add_expected_saved_initial_values(config_vars, expected_vars)
+
+ self.assertEqual(expected_vars,
+ _osx_support._check_for_unavailable_sdk(
+ config_vars))
+
def test_get_platform_osx(self):
# Note, get_platform_osx is currently tested more extensively
# indirectly by test_sysconfig and test_distutils
# be 1D, at least one format must be 'c', 'b' or 'B'.
for _tshape in gencastshapes():
for char in fmtdict['@']:
+ # Casts to _Bool are undefined if the source contains values
+ # other than 0 or 1.
+ if char == "?":
+ continue
tfmt = ('', '@')[randrange(2)] + char
tsize = struct.calcsize(tfmt)
n = prod(_tshape) * tsize
"""Tests that use a pseudo terminal to guarantee stdin and stdout are
terminals in the test environment"""
+ @staticmethod
+ def handle_sighup(signum, frame):
+ # bpo-40140: if the process is the session leader, os.close(fd)
+ # of "pid, fd = pty.fork()" can raise SIGHUP signal:
+ # just ignore the signal.
+ pass
+
def run_child(self, child, terminal_input):
+ old_sighup = signal.signal(signal.SIGHUP, self.handle_sighup)
+ try:
+ return self._run_child(child, terminal_input)
+ finally:
+ signal.signal(signal.SIGHUP, old_sighup)
+
+ def _run_child(self, child, terminal_input):
r, w = os.pipe() # Pipe test results from child back to parent
try:
pid, fd = pty.fork()
child_output = child_output.decode("ascii", "ignore")
self.fail("got %d lines in pipe but expected 2, child output was:\n%s"
% (len(lines), child_output))
+
+ # bpo-40155: Close the PTY before waiting for the child process
+ # completion, otherwise the child process hangs on AIX.
os.close(fd)
# Wait until the child process completes
self.assertRaises(TypeError, _testcapi.get_mapping_values, bad_mapping)
self.assertRaises(TypeError, _testcapi.get_mapping_items, bad_mapping)
+ def test_pynumber_tobase(self):
+ from _testcapi import pynumber_tobase
+ self.assertEqual(pynumber_tobase(123, 2), '0b1111011')
+ self.assertEqual(pynumber_tobase(123, 8), '0o173')
+ self.assertEqual(pynumber_tobase(123, 10), '123')
+ self.assertEqual(pynumber_tobase(123, 16), '0x7b')
+ self.assertEqual(pynumber_tobase(-123, 2), '-0b1111011')
+ self.assertEqual(pynumber_tobase(-123, 8), '-0o173')
+ self.assertEqual(pynumber_tobase(-123, 10), '-123')
+ self.assertEqual(pynumber_tobase(-123, 16), '-0x7b')
+ self.assertRaises(TypeError, pynumber_tobase, 123.0, 10)
+ self.assertRaises(TypeError, pynumber_tobase, '123', 10)
+ self.assertRaises(SystemError, pynumber_tobase, 123, 0)
+
class TestPendingCalls(unittest.TestCase):
'file': [b'Testing 123.\n'], 'title': ['']}
self.assertEqual(result, expected)
+ def test_parse_multipart_without_content_length(self):
+ POSTDATA = '''--JfISa01
+Content-Disposition: form-data; name="submit-name"
+
+just a string
+
+--JfISa01--
+'''
+ fp = BytesIO(POSTDATA.encode('latin1'))
+ env = {'boundary': 'JfISa01'.encode('latin1')}
+ result = cgi.parse_multipart(fp, env)
+ expected = {'submit-name': ['just a string\n']}
+ self.assertEqual(result, expected)
+
def test_parse_multipart_invalid_encoding(self):
BOUNDARY = "JfISa01"
POSTDATA = """--JfISa01
Nick Mathewson
"""
import unittest
-from test.support import is_jython
+from test import support
from codeop import compile_command, PyCF_DONT_IMPLY_DEDENT
import io
-if is_jython:
+if support.is_jython:
import sys
def unify_callables(d):
def assertValid(self, str, symbol='single'):
'''succeed iff str is a valid piece of code'''
- if is_jython:
+ if support.is_jython:
code = compile_command(str, "<input>", symbol)
self.assertTrue(code)
if symbol == "single":
av = self.assertValid
# special case
- if not is_jython:
+ if not support.is_jython:
self.assertEqual(compile_command(""),
compile("pass", "<input>", 'single',
PyCF_DONT_IMPLY_DEDENT))
self.assertNotEqual(compile_command("a = 1\n", "abc").co_filename,
compile("a = 1\n", "def", 'single').co_filename)
+ def test_warning(self):
+        # Test that the warning is only issued once.
+ with support.check_warnings((".*invalid", DeprecationWarning)) as w:
+ compile_command("'\e'")
+ self.assertEqual(len(w.warnings), 1)
if __name__ == "__main__":
unittest.main()
BrokenExecutor)
from concurrent.futures.process import BrokenProcessPool
from multiprocessing import get_context
+import multiprocessing.util
def create_future(state=PENDING, exception=None, result=None):
test.support.run_unittest(__name__)
finally:
test.support.reap_children()
+ multiprocessing.util._cleanup_tests()
if __name__ == "__main__":
test_main()
self.assertEqual(Decimal.from_float(cls(101.1)),
Decimal.from_float(101.1))
- def test_maxcontext_exact_arith(self):
-
- # Make sure that exact operations do not raise MemoryError due
- # to huge intermediate values when the context precision is very
- # large.
-
- # The following functions fill the available precision and are
- # therefore not suitable for large precisions (by design of the
- # specification).
- MaxContextSkip = ['logical_invert', 'next_minus', 'next_plus',
- 'logical_and', 'logical_or', 'logical_xor',
- 'next_toward', 'rotate', 'shift']
-
- Decimal = C.Decimal
- Context = C.Context
- localcontext = C.localcontext
-
- # Here only some functions that are likely candidates for triggering a
- # MemoryError are tested. deccheck.py has an exhaustive test.
- maxcontext = Context(prec=C.MAX_PREC, Emin=C.MIN_EMIN, Emax=C.MAX_EMAX)
- with localcontext(maxcontext):
- self.assertEqual(Decimal(0).exp(), 1)
- self.assertEqual(Decimal(1).ln(), 0)
- self.assertEqual(Decimal(1).log10(), 0)
- self.assertEqual(Decimal(10**2).log10(), 2)
- self.assertEqual(Decimal(10**223).log10(), 223)
- self.assertEqual(Decimal(10**19).logb(), 19)
- self.assertEqual(Decimal(4).sqrt(), 2)
- self.assertEqual(Decimal("40E9").sqrt(), Decimal('2.0E+5'))
- self.assertEqual(divmod(Decimal(10), 3), (3, 1))
- self.assertEqual(Decimal(10) // 3, 3)
- self.assertEqual(Decimal(4) / 2, 2)
- self.assertEqual(Decimal(400) ** -1, Decimal('0.0025'))
-
-
@requires_docstrings
@unittest.skipUnless(C, "test requires C version")
class SignatureTest(unittest.TestCase):
import os
import sys
import importlib
+import importlib.abc
+import importlib.util
import unittest
import tempfile
+import shutil
+import contextlib
# NOTE: There are some additional tests relating to interaction with
# zipimport in the test_zipimport_support test module.
>>> tests = finder.find(sample_func)
>>> print(tests) # doctest: +ELLIPSIS
- [<DocTest sample_func from ...:21 (1 example)>]
+ [<DocTest sample_func from ...:25 (1 example)>]
The exact name depends on how test_doctest was invoked, so allow for
leading path components.
>>> sys.argv = save_argv
"""
+class TestImporter(importlib.abc.MetaPathFinder, importlib.abc.ResourceLoader):
+
+ def find_spec(self, fullname, path, target=None):
+ return importlib.util.spec_from_file_location(fullname, path, loader=self)
+
+ def get_data(self, path):
+ with open(path, mode='rb') as f:
+ return f.read()
+
+class TestHook:
+
+ def __init__(self, pathdir):
+ self.sys_path = sys.path[:]
+ self.meta_path = sys.meta_path[:]
+ self.path_hooks = sys.path_hooks[:]
+ sys.path.append(pathdir)
+ sys.path_importer_cache.clear()
+ self.modules_before = sys.modules.copy()
+ self.importer = TestImporter()
+ sys.meta_path.append(self.importer)
+
+ def remove(self):
+ sys.path[:] = self.sys_path
+ sys.meta_path[:] = self.meta_path
+ sys.path_hooks[:] = self.path_hooks
+ sys.path_importer_cache.clear()
+ sys.modules.clear()
+ sys.modules.update(self.modules_before)
+
+
+@contextlib.contextmanager
+def test_hook(pathdir):
+ hook = TestHook(pathdir)
+ try:
+ yield hook
+ finally:
+ hook.remove()
+
+
def test_lineendings(): r"""
-*nix systems use \n line endings, while Windows systems use \r\n. Python
+*nix systems use \n line endings, while Windows systems use \r\n, and
+old Mac systems used \r, which Python still recognizes as a line ending. Python
handles this using universal newline mode for reading files. Let's make
sure doctest does so (issue 8473) by creating temporary test files using each
-of the two line disciplines. One of the two will be the "wrong" one for the
-platform the test is run on.
+of the three line disciplines. At least one will not match either the universal
+newline \n or os.linesep for the platform the test is run on.
Windows line endings first:
TestResults(failed=0, attempted=1)
>>> os.remove(fn)
+And finally old Mac line endings:
+
+ >>> fn = tempfile.mktemp()
+ >>> with open(fn, 'wb') as f:
+ ... f.write(b'Test:\r\r >>> x = 1 + 1\r\rDone.\r')
+ 30
+ >>> doctest.testfile(fn, module_relative=False, verbose=False)
+ TestResults(failed=0, attempted=1)
+ >>> os.remove(fn)
+
+Now we test with a package loader that has a get_data method, since that
+bypasses the standard universal newline handling so doctest has to do the
+newline conversion itself; let's make sure it does so correctly (issue 1812).
+We'll write a file inside the package that has all three kinds of line endings
+in it, and use a package hook to install a custom loader; on any platform,
+at least one of the line endings will raise a ValueError for inconsistent
+whitespace if doctest does not correctly do the newline conversion.
+
+ >>> dn = tempfile.mkdtemp()
+ >>> pkg = os.path.join(dn, "doctest_testpkg")
+ >>> os.mkdir(pkg)
+ >>> support.create_empty_file(os.path.join(pkg, "__init__.py"))
+ >>> fn = os.path.join(pkg, "doctest_testfile.txt")
+ >>> with open(fn, 'wb') as f:
+ ... f.write(
+ ... b'Test:\r\n\r\n'
+ ... b' >>> x = 1 + 1\r\n\r\n'
+ ... b'Done.\r\n'
+ ... b'Test:\n\n'
+ ... b' >>> x = 1 + 1\n\n'
+ ... b'Done.\n'
+ ... b'Test:\r\r'
+ ... b' >>> x = 1 + 1\r\r'
+ ... b'Done.\r'
+ ... )
+ 95
+ >>> with test_hook(dn):
+ ... doctest.testfile("doctest_testfile.txt", package="doctest_testpkg", verbose=False)
+ TestResults(failed=0, attempted=3)
+ >>> shutil.rmtree(dn)
+
"""
def test_testmod(): r"""
from io import StringIO, BytesIO
from itertools import chain
from random import choice
-from socket import getfqdn
from threading import Thread
+from unittest.mock import patch
import email
import email.policy
'.test-idstring@testdomain-string>')
def test_make_msgid_default_domain(self):
- self.assertTrue(
- email.utils.make_msgid().endswith(
- '@' + getfqdn() + '>'))
+ with patch('socket.getfqdn') as mock_getfqdn:
+ mock_getfqdn.return_value = domain = 'pythontest.example.com'
+ self.assertTrue(
+ email.utils.make_msgid().endswith(
+ '@' + domain + '>'))
def test_Generator_linend(self):
# Issue 14645.
{'filename': 'foo'},
[errors.InvalidHeaderDefect]),
+ 'invalid_parameter_value_with_fws_between_ew': (
+ 'attachment; filename="=?UTF-8?Q?Schulbesuchsbest=C3=A4ttigung=2E?='
+ ' =?UTF-8?Q?pdf?="',
+ 'attachment',
+ {'filename': 'Schulbesuchsbestättigung.pdf'},
+ [errors.InvalidHeaderDefect]*3,
+ ('attachment; filename="Schulbesuchsbestättigung.pdf"'),
+ ('Content-Disposition: attachment;\n'
+ ' filename*=utf-8\'\'Schulbesuchsbest%C3%A4ttigung.pdf\n'),
+ ),
+
+ 'parameter_value_with_fws_between_tokens': (
+ 'attachment; filename="File =?utf-8?q?Name?= With Spaces.pdf"',
+ 'attachment',
+ {'filename': 'File Name With Spaces.pdf'},
+ [errors.InvalidHeaderDefect],
+ 'attachment; filename="File Name With Spaces.pdf"',
+ ('Content-Disposition: attachment; filename="File Name With Spaces.pdf"\n'),
+ )
}
# with self.assertRaises(ValueError):
# Address('foo', 'wők', 'example.com')
+ def test_crlf_in_constructor_args_raises(self):
+ cases = (
+ dict(display_name='foo\r'),
+ dict(display_name='foo\n'),
+ dict(display_name='foo\r\n'),
+ dict(domain='example.com\r'),
+ dict(domain='example.com\n'),
+ dict(domain='example.com\r\n'),
+ dict(username='wok\r'),
+ dict(username='wok\n'),
+ dict(username='wok\r\n'),
+ dict(addr_spec='wok@example.com\r'),
+ dict(addr_spec='wok@example.com\n'),
+ dict(addr_spec='wok@example.com\r\n')
+ )
+ for kwargs in cases:
+ with self.subTest(kwargs=kwargs), self.assertRaisesRegex(ValueError, "invalid arguments"):
+ Address(**kwargs)
+
def test_non_ascii_username_in_addr_spec_raises(self):
with self.assertRaises(ValueError):
Address('foo', addr_spec='wők@example.com')
self.run_pip.assert_called_once_with(
[
- "install", "--no-index", "--find-links",
+ "install", "--no-cache-dir", "--no-index", "--find-links",
unittest.mock.ANY, "setuptools", "pip",
],
unittest.mock.ANY,
self.run_pip.assert_called_once_with(
[
- "install", "--no-index", "--find-links",
+ "install", "--no-cache-dir", "--no-index", "--find-links",
unittest.mock.ANY, "--root", "/foo/bar/",
"setuptools", "pip",
],
self.run_pip.assert_called_once_with(
[
- "install", "--no-index", "--find-links",
+ "install", "--no-cache-dir", "--no-index", "--find-links",
unittest.mock.ANY, "--user", "setuptools", "pip",
],
unittest.mock.ANY,
self.run_pip.assert_called_once_with(
[
- "install", "--no-index", "--find-links",
+ "install", "--no-cache-dir", "--no-index", "--find-links",
unittest.mock.ANY, "--upgrade", "setuptools", "pip",
],
unittest.mock.ANY,
self.run_pip.assert_called_once_with(
[
- "install", "--no-index", "--find-links",
+ "install", "--no-cache-dir", "--no-index", "--find-links",
unittest.mock.ANY, "-v", "setuptools", "pip",
],
unittest.mock.ANY,
self.run_pip.assert_called_once_with(
[
- "install", "--no-index", "--find-links",
+ "install", "--no-cache-dir", "--no-index", "--find-links",
unittest.mock.ANY, "-vv", "setuptools", "pip",
],
unittest.mock.ANY,
self.run_pip.assert_called_once_with(
[
- "install", "--no-index", "--find-links",
+ "install", "--no-cache-dir", "--no-index", "--find-links",
unittest.mock.ANY, "-vvv", "setuptools", "pip",
],
unittest.mock.ANY,
self.run_pip.assert_called_once_with(
[
- "install", "--no-index", "--find-links",
+ "install", "--no-cache-dir", "--no-index", "--find-links",
unittest.mock.ANY, "setuptools", "pip",
],
unittest.mock.ANY,
self.assertEqual(Color.blue.value, 2)
self.assertEqual(Color.green.value, 3)
+ def test_auto_order(self):
+ with self.assertRaises(TypeError):
+ class Color(Enum):
+ red = auto()
+ green = auto()
+ blue = auto()
+ def _generate_next_value_(name, start, count, last):
+ return name
+
+
def test_duplicate_auto(self):
class Dupes(Enum):
first = primero = auto()
eq("dict[str, int]")
eq("set[str,]")
eq("tuple[str, ...]")
+ eq("tuple[(str, *types)]")
+ eq("tuple[xx:yy, (*types,)]")
+ eq("tuple[str, int, (str, int)]")
+ eq("tuple[(*int, str, str, (str, int))]")
eq("tuple[str, int, float, dict[str, int]]")
eq("slice[0]")
eq("slice[0:1]")
eq("slice[:-1]")
eq("slice[1:]")
eq("slice[::-1]")
+ eq("slice[:,]")
+ eq("slice[1:2,]")
+ eq("slice[1:2:3,]")
+ eq("slice[1:2, 1]")
+ eq("slice[1:2, 2, 3]")
eq("slice[()]")
eq("slice[a, b:c, d:e:f]")
eq("slice[(x for x in a)]")
def get_gdb_version():
try:
- proc = subprocess.Popen(["gdb", "-nx", "--version"],
+ cmd = ["gdb", "-nx", "--version"]
+ proc = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)
with proc:
- version = proc.communicate()[0]
+ version, stderr = proc.communicate()
+
+ if proc.returncode:
+ raise Exception(f"Command {' '.join(cmd)!r} failed "
+ f"with exit code {proc.returncode}: "
+ f"stdout={version!r} stderr={stderr!r}")
except OSError:
# This is what "no gdb" looks like. There may, however, be other
# errors that manifest this way too.
" because the Program Counter is"
" not present")
+ # bpo-40019: Skip the test if gdb failed to read debug information
+ # because the Python binary is optimized.
+ for pattern in (
+ '(frame information optimized out)',
+ 'Unable to read information on python frame',
+ ):
+ if pattern in out:
+ raise unittest.SkipTest(f"{pattern!r} found in gdb output")
+
return out
def get_gdb_repr(self, source,
thread.join()
self.assertEqual(result, b"proxied data\n")
- def test_putrequest_override_validation(self):
+ def test_putrequest_override_domain_validation(self):
"""
It should be possible to override the default validation
behavior in putrequest (bpo-38216).
conn.sock = FakeSocket('')
conn.putrequest('GET', '/\x00')
+ def test_putrequest_override_host_validation(self):
+ class UnsafeHTTPConnection(client.HTTPConnection):
+ def _validate_host(self, url):
+ pass
+
+ conn = UnsafeHTTPConnection('example.com\r\n')
+ conn.sock = FakeSocket('')
+ # set skip_host so a ValueError is not raised upon adding the
+ # invalid URL as the value of the "Host:" header
+ conn.putrequest('GET', '/', skip_host=1)
+
def test_putrequest_override_encoding(self):
"""
It should be possible to override the default encoding
@unittest.skipUnless(
support.is_resource_enabled('network'), 'network resource disabled')
+@unittest.skip('cyrus.andrew.cmu.edu blocks connections')
class RemoteIMAPTest(unittest.TestCase):
host = 'cyrus.andrew.cmu.edu'
port = 143
@unittest.skipUnless(ssl, "SSL not available")
@unittest.skipUnless(
support.is_resource_enabled('network'), 'network resource disabled')
+@unittest.skip('cyrus.andrew.cmu.edu blocks connections')
class RemoteIMAP_STARTTLSTest(RemoteIMAPTest):
def setUp(self):
@unittest.skipUnless(ssl, "SSL not available")
+@unittest.skip('cyrus.andrew.cmu.edu blocks connections')
class RemoteIMAP_SSLTest(RemoteIMAPTest):
port = 993
imap_class = IMAP4_SSL
+import errno
import os
import sys
import textwrap
import unittest
+
from subprocess import Popen, PIPE
from test import support
from test.support.script_helper import assert_python_ok
self.assertEqual(out.splitlines(),
self.expect_without_sort_keys.encode().splitlines())
self.assertEqual(err, b'')
+
+    @unittest.skipIf(sys.platform == "win32", "The test fails with ValueError on Windows")
+ def test_broken_pipe_error(self):
+ cmd = [sys.executable, '-m', 'json.tool']
+ proc = Popen(cmd, stdout=PIPE, stdin=PIPE)
+ # bpo-39828: Closing before json.tool attempts to write into stdout.
+ proc.stdout.close()
+ proc.communicate(b'"{}"')
+ self.assertEqual(proc.returncode, errno.EPIPE)
self.fail('Failures in test_mtestfile:\n ' +
'\n '.join(failures))
+ def test_issue39871(self):
+ # A SystemError should not be raised if the first arg to atan2(),
+ # copysign(), or remainder() cannot be converted to a float.
+ class F:
+ def __float__(self):
+ self.converted = True
+ 1/0
+ for func in math.atan2, math.copysign, math.remainder:
+ y = F()
+ with self.assertRaises(TypeError):
+ func("not a number", y)
+
+ # There should not have been any attempt to convert the second
+ # argument to a float.
+ self.assertFalse(getattr(y, "converted", False))
+
# Custom assertions.
def assertIsNaN(self, value):
# We are the parent of our subprocess
self.assertEqual(int(stdout), os.getpid())
+ def check_waitpid(self, code, exitcode):
+ if sys.platform == 'win32':
+ # On Windows, os.spawnv() simply joins arguments with spaces:
+ # arguments need to be quoted
+ args = [f'"{sys.executable}"', '-c', f'"{code}"']
+ else:
+ args = [sys.executable, '-c', code]
+ pid = os.spawnv(os.P_NOWAIT, sys.executable, args)
+
+ pid2, status = os.waitpid(pid, 0)
+ if sys.platform == 'win32':
+ self.assertEqual(status, exitcode << 8)
+ else:
+ self.assertTrue(os.WIFEXITED(status), status)
+ self.assertEqual(os.WEXITSTATUS(status), exitcode)
+ self.assertEqual(pid2, pid)
+
def test_waitpid(self):
- args = [sys.executable, '-c', 'pass']
- # Add an implicit test for PyUnicode_FSConverter().
- pid = os.spawnv(os.P_NOWAIT, FakePath(args[0]), args)
- status = os.waitpid(pid, 0)
- self.assertEqual(status, (pid, 0))
+ self.check_waitpid(code='pass', exitcode=0)
+
+ def test_waitpid_exitcode(self):
+ exitcode = 23
+ code = f'import sys; sys.exit({exitcode})'
+ self.check_waitpid(code, exitcode=exitcode)
+
+ @unittest.skipUnless(sys.platform == 'win32', 'win32-specific test')
+ def test_waitpid_windows(self):
+ # bpo-40138: test os.waitpid() with exit code larger than INT_MAX.
+ STATUS_CONTROL_C_EXIT = 0xC000013A
+ code = f'import _winapi; _winapi.ExitProcess({STATUS_CONTROL_C_EXIT})'
+ self.check_waitpid(code, exitcode=STATUS_CONTROL_C_EXIT)
class SpawnTests(unittest.TestCase):
self.assertEqual(set(p.glob("dirA/../file*")), { P(BASE, "dirA/../fileA") })
self.assertEqual(set(p.glob("../xyzzy")), set())
+ @support.skip_unless_symlink
+ def test_glob_permissions(self):
+ # See bpo-38894
+ P = self.cls
+ base = P(BASE) / 'permissions'
+ base.mkdir()
+
+ file1 = base / "file1"
+ file1.touch()
+ file2 = base / "file2"
+ file2.touch()
+
+ subdir = base / "subdir"
+
+ file3 = base / "file3"
+ file3.symlink_to(subdir / "other")
+
+        # Patching is needed to avoid relying on the order in which
+        # the filesystem returns the files, as the error will not
+        # happen if the symlink is the last item.
+
+ with mock.patch("os.scandir") as scandir:
+ scandir.return_value = sorted(os.scandir(base))
+ self.assertEqual(len(set(base.glob("*"))), 3)
+
+ subdir.mkdir()
+
+ with mock.patch("os.scandir") as scandir:
+ scandir.return_value = sorted(os.scandir(base))
+ self.assertEqual(len(set(base.glob("*"))), 4)
+
+ subdir.chmod(000)
+
+ with mock.patch("os.scandir") as scandir:
+ scandir.return_value = sorted(os.scandir(base))
+ self.assertEqual(len(set(base.glob("*"))), 4)
def _check_resolve(self, p, expected, strict=True):
q = p.resolve(strict)
self.addCleanup(signal.signal, signal.SIGALRM, old_alarm)
old_sighup = signal.signal(signal.SIGHUP, self.handle_sighup)
- self.addCleanup(signal.signal, signal.SIGHUP, old_alarm)
+ self.addCleanup(signal.signal, signal.SIGHUP, old_sighup)
# isatty() and close() can hang on some platforms. Set an alarm
# before running the test to make sure we don't hang forever.
self.fail("isatty hung")
@staticmethod
- def handle_sighup(sig, frame):
- # if the process is the session leader, os.close(master_fd)
+ def handle_sighup(signum, frame):
+ # bpo-38547: if the process is the session leader, os.close(master_fd)
# of "master_fd, slave_name = pty.master_open()" raises SIGHUP
# signal: just ignore the signal.
pass
self.assertEqual(e[3], e.pw_gid)
self.assertIsInstance(e.pw_gid, int)
self.assertEqual(e[4], e.pw_gecos)
- self.assertIsInstance(e.pw_gecos, str)
+ self.assertIn(type(e.pw_gecos), (str, type(None)))
self.assertEqual(e[5], e.pw_dir)
self.assertIsInstance(e.pw_dir, str)
self.assertEqual(e[6], e.pw_shell)
self.assertTrue(ns.single)
self.checkError([opt, '-f', 'foo'], "don't go together")
+ def test_ignore(self):
+ for opt in '-i', '--ignore':
+ with self.subTest(opt=opt):
+ ns = libregrtest._parse_args([opt, 'pattern'])
+ self.assertEqual(ns.ignore_tests, ['pattern'])
+ self.checkError([opt], 'expected one argument')
+
+ self.addCleanup(support.unlink, support.TESTFN)
+ with open(support.TESTFN, "w") as fp:
+ print('matchfile1', file=fp)
+ print('matchfile2', file=fp)
+
+ filename = os.path.abspath(support.TESTFN)
+ ns = libregrtest._parse_args(['-m', 'match',
+ '--ignorefile', filename])
+ self.assertEqual(ns.ignore_tests,
+ ['matchfile1', 'matchfile2'])
+
def test_match(self):
for opt in '-m', '--match':
with self.subTest(opt=opt):
regex = re.compile("^(test[^ ]+).*ok$", flags=re.MULTILINE)
return [match.group(1) for match in regex.finditer(output)]
+ def test_ignorefile(self):
+ code = textwrap.dedent("""
+ import unittest
+
+ class Tests(unittest.TestCase):
+ def test_method1(self):
+ pass
+ def test_method2(self):
+ pass
+ def test_method3(self):
+ pass
+ def test_method4(self):
+ pass
+ """)
+ all_methods = ['test_method1', 'test_method2',
+ 'test_method3', 'test_method4']
+ testname = self.create_test(code=code)
+
+ # only run a subset
+ filename = support.TESTFN
+ self.addCleanup(support.unlink, filename)
+
+ subset = [
+ # only ignore the method name
+ 'test_method1',
+ # ignore the full identifier
+ '%s.Tests.test_method3' % testname]
+ with open(filename, "w") as fp:
+ for name in subset:
+ print(name, file=fp)
+
+ output = self.run_tests("-v", "--ignorefile", filename, testname)
+ methods = self.parse_methods(output)
+ subset = ['test_method2', 'test_method4']
+ self.assertEqual(methods, subset)
+
def test_matchfile(self):
code = textwrap.dedent("""
import unittest
from test.support import (captured_stderr, TESTFN, EnvironmentVarGuard,
change_cwd)
import builtins
+import glob
import os
import sys
import re
class StartupImportTests(unittest.TestCase):
def test_startup_imports(self):
+ # Get sys.path in isolated mode (python3 -I)
+ popen = subprocess.Popen([sys.executable, '-I', '-c',
+ 'import sys; print(repr(sys.path))'],
+ stdout=subprocess.PIPE,
+ encoding='utf-8')
+ stdout = popen.communicate()[0]
+ self.assertEqual(popen.returncode, 0, repr(stdout))
+ isolated_paths = eval(stdout)
+
+ # bpo-27807: Even with -I, the site module executes all .pth files
+ # found in sys.path (see site.addpackage()). Skip the test if at least
+ # one .pth file is found.
+ for path in isolated_paths:
+ pth_files = glob.glob(os.path.join(path, "*.pth"))
+ if pth_files:
+ self.skipTest(f"found {len(pth_files)} .pth files in: {path}")
+
# This tests checks which modules are loaded by Python when it
# initially starts upon startup.
popen = subprocess.Popen([sys.executable, '-I', '-v', '-c',
stderr=subprocess.PIPE,
encoding='utf-8')
stdout, stderr = popen.communicate()
+ self.assertEqual(popen.returncode, 0, (stdout, stderr))
modules = eval(stdout)
self.assertIn('site', modules)
def get_cid():
if fcntl is None:
return None
+ if not hasattr(socket, 'IOCTL_VM_SOCKETS_GET_LOCAL_CID'):
+ return None
try:
with open("/dev/vsock", "rb") as f:
r = fcntl.ioctl(f, socket.IOCTL_VM_SOCKETS_GET_LOCAL_CID, " ")
s2 = struct.Struct(s.format.encode())
self.assertEqual(s2.format, s.format)
+ def test_issue35714(self):
+ # Embedded null characters should not be allowed in format strings.
+ for s in '\0', '2\0i', b'\0':
+ with self.assertRaisesRegex(struct.error,
+ 'embedded null character'):
+ struct.calcsize(s)
+
class UnpackIteratorTest(unittest.TestCase):
"""
# on adding even when the environment in exec is empty.
# Gentoo sandboxes also force LD_PRELOAD and SANDBOX_* to exist.
return ('VERSIONER' in n or '__CF' in n or # MacOS
- '__PYVENV_LAUNCHER__' in n or # MacOS framework build
n == 'LD_PRELOAD' or n.startswith('SANDBOX') or # Gentoo
n == 'LC_CTYPE') # Locale coercion triggered
test_access = Test('test.test_os.FileTests.test_access')
test_chdir = Test('test.test_os.Win32ErrorTests.test_chdir')
+ # Test acceptance
with support.swap_attr(support, '_match_test_func', None):
# match all
support.set_match_tests([])
self.assertTrue(support.match_test(test_chdir))
# match all using None
- support.set_match_tests(None)
+ support.set_match_tests(None, None)
self.assertTrue(support.match_test(test_access))
self.assertTrue(support.match_test(test_chdir))
# match the full test identifier
- support.set_match_tests([test_access.id()])
+ support.set_match_tests([test_access.id()], None)
self.assertTrue(support.match_test(test_access))
self.assertFalse(support.match_test(test_chdir))
# match the module name
- support.set_match_tests(['test_os'])
+ support.set_match_tests(['test_os'], None)
self.assertTrue(support.match_test(test_access))
self.assertTrue(support.match_test(test_chdir))
# Test '*' pattern
- support.set_match_tests(['test_*'])
+ support.set_match_tests(['test_*'], None)
self.assertTrue(support.match_test(test_access))
self.assertTrue(support.match_test(test_chdir))
# Test case sensitivity
- support.set_match_tests(['filetests'])
+ support.set_match_tests(['filetests'], None)
self.assertFalse(support.match_test(test_access))
- support.set_match_tests(['FileTests'])
+ support.set_match_tests(['FileTests'], None)
self.assertTrue(support.match_test(test_access))
# Test pattern containing '.' and a '*' metacharacter
- support.set_match_tests(['*test_os.*.test_*'])
+ support.set_match_tests(['*test_os.*.test_*'], None)
self.assertTrue(support.match_test(test_access))
self.assertTrue(support.match_test(test_chdir))
# Multiple patterns
- support.set_match_tests([test_access.id(), test_chdir.id()])
+ support.set_match_tests([test_access.id(), test_chdir.id()], None)
self.assertTrue(support.match_test(test_access))
self.assertTrue(support.match_test(test_chdir))
- support.set_match_tests(['test_access', 'DONTMATCH'])
+ support.set_match_tests(['test_access', 'DONTMATCH'], None)
self.assertTrue(support.match_test(test_access))
self.assertFalse(support.match_test(test_chdir))
+ # Test rejection
+ with support.swap_attr(support, '_match_test_func', None):
+ # match all
+ support.set_match_tests(ignore_patterns=[])
+ self.assertTrue(support.match_test(test_access))
+ self.assertTrue(support.match_test(test_chdir))
+
+ # match all using None
+ support.set_match_tests(None, None)
+ self.assertTrue(support.match_test(test_access))
+ self.assertTrue(support.match_test(test_chdir))
+
+ # match the full test identifier
+ support.set_match_tests(None, [test_access.id()])
+ self.assertFalse(support.match_test(test_access))
+ self.assertTrue(support.match_test(test_chdir))
+
+ # match the module name
+ support.set_match_tests(None, ['test_os'])
+ self.assertFalse(support.match_test(test_access))
+ self.assertFalse(support.match_test(test_chdir))
+
+ # Test '*' pattern
+ support.set_match_tests(None, ['test_*'])
+ self.assertFalse(support.match_test(test_access))
+ self.assertFalse(support.match_test(test_chdir))
+
+ # Test case sensitivity
+ support.set_match_tests(None, ['filetests'])
+ self.assertTrue(support.match_test(test_access))
+ support.set_match_tests(None, ['FileTests'])
+ self.assertFalse(support.match_test(test_access))
+
+ # Test pattern containing '.' and a '*' metacharacter
+ support.set_match_tests(None, ['*test_os.*.test_*'])
+ self.assertFalse(support.match_test(test_access))
+ self.assertFalse(support.match_test(test_chdir))
+
+ # Multiple patterns
+ support.set_match_tests(None, [test_access.id(), test_chdir.id()])
+ self.assertFalse(support.match_test(test_access))
+ self.assertFalse(support.match_test(test_chdir))
+
+ support.set_match_tests(None, ['test_access', 'DONTMATCH'])
+ self.assertFalse(support.match_test(test_access))
+ self.assertTrue(support.match_test(test_chdir))
+
def test_fd_count(self):
# We cannot test the absolute value of fd_count(): on old Linux
# kernel or glibc versions, os.urandom() keeps a FD open on
self.assertTrue(self.spam.lookup("bar").is_declared_global())
self.assertFalse(self.internal.lookup("x").is_global())
self.assertFalse(self.Mine.lookup("instance_var").is_global())
+ self.assertTrue(self.spam.lookup("bar").is_global())
def test_local(self):
self.assertTrue(self.spam.lookup("x").is_local())
- self.assertFalse(self.internal.lookup("x").is_local())
+ self.assertFalse(self.spam.lookup("bar").is_local())
+
+ def test_free(self):
+ self.assertTrue(self.internal.lookup("x").is_free())
def test_referenced(self):
self.assertTrue(self.internal.lookup("x").is_referenced())
# Verify writelines with a SpooledTemporaryFile
f = self.do_create()
f.writelines((b'x', b'y', b'z'))
- f.seek(0)
+ pos = f.seek(0)
+ self.assertEqual(pos, 0)
buf = f.read()
self.assertEqual(buf, b'xyz')
# when that occurs
f = self.do_create(max_size=30)
self.assertFalse(f._rolled)
- f.seek(100, 0)
+ pos = f.seek(100, 0)
+ self.assertEqual(pos, 100)
self.assertFalse(f._rolled)
f.write(b'x')
self.assertTrue(f._rolled)
self.check_roundtrip(r"""{**{'y': 2}, 'x': 1}""")
self.check_roundtrip(r"""{**{'y': 2}, **{'x': 1}}""")
+ def test_subscript(self):
+ self.check_roundtrip("a[i]")
+ self.check_roundtrip("a[i,]")
+ self.check_roundtrip("a[i, j]")
+ self.check_roundtrip("a[()]")
+ self.check_roundtrip("a[i:j]")
+ self.check_roundtrip("a[:j]")
+ self.check_roundtrip("a[i:]")
+ self.check_roundtrip("a[i:j:k]")
+ self.check_roundtrip("a[:j:k]")
+ self.check_roundtrip("a[i::k]")
+ self.check_roundtrip("a[i:j,]")
+ self.check_roundtrip("a[i:j, k]")
+
class DirectoryTestCase(ASTTestCase):
"""Test roundtrip behaviour on all files in Lib and Lib/test."""
with self.assertRaises(TypeError):
TypeVar('X', str, float, bound=Employee)
+ def test_missing__name__(self):
+ # See bpo-39942
+ code = ("import typing\n"
+ "T = typing.TypeVar('T')\n"
+ )
+ exec(code, {})
+
def test_no_bivariant(self):
with self.assertRaises(ValueError):
TypeVar('T', covariant=True, contravariant=True)
self.unfakehttp()
@unittest.skipUnless(ssl, "ssl module required")
- def test_url_with_control_char_rejected(self):
+ def test_url_path_with_control_char_rejected(self):
for char_no in list(range(0, 0x21)) + [0x7f]:
char = chr(char_no)
schemeless_url = f"//localhost:7777/test{char}/"
self.unfakehttp()
@unittest.skipUnless(ssl, "ssl module required")
- def test_url_with_newline_header_injection_rejected(self):
+ def test_url_path_with_newline_header_injection_rejected(self):
self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123"
schemeless_url = "//" + host + ":8080/test/?test=a"
finally:
self.unfakehttp()
+ @unittest.skipUnless(ssl, "ssl module required")
+ def test_url_host_with_control_char_rejected(self):
+ for char_no in list(range(0, 0x21)) + [0x7f]:
+ char = chr(char_no)
+ schemeless_url = f"//localhost{char}/test/"
+ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
+ try:
+ escaped_char_repr = repr(char).replace('\\', r'\\')
+ InvalidURL = http.client.InvalidURL
+ with self.assertRaisesRegex(
+ InvalidURL, f"contain control.*{escaped_char_repr}"):
+ urlopen(f"http:{schemeless_url}")
+ with self.assertRaisesRegex(InvalidURL, f"contain control.*{escaped_char_repr}"):
+ urlopen(f"https:{schemeless_url}")
+ finally:
+ self.unfakehttp()
+
+ @unittest.skipUnless(ssl, "ssl module required")
+ def test_url_host_with_newline_header_injection_rejected(self):
+ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.")
+ host = "localhost\r\nX-injected: header\r\n"
+ schemeless_url = "//" + host + ":8080/test/?test=a"
+ try:
+ InvalidURL = http.client.InvalidURL
+ with self.assertRaisesRegex(
+ InvalidURL, r"contain control.*\\r"):
+ urlopen(f"http:{schemeless_url}")
+ with self.assertRaisesRegex(InvalidURL, r"contain control.*\\n"):
+ urlopen(f"https:{schemeless_url}")
+ finally:
+ self.unfakehttp()
+
def test_read_0_9(self):
# "0.9" response accepted (but not "simple responses" without
# a status line)
bypass = {'exclude_simple': True, 'exceptions': []}
self.assertTrue(_proxy_bypass_macosx_sysconf('test', bypass))
- def test_basic_auth(self, quote_char='"'):
- opener = OpenerDirector()
- password_manager = MockPasswordManager()
- auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
- realm = "ACME Widget Store"
- http_handler = MockHTTPHandler(
- 401, 'WWW-Authenticate: Basic realm=%s%s%s\r\n\r\n' %
- (quote_char, realm, quote_char))
- opener.add_handler(auth_handler)
- opener.add_handler(http_handler)
- self._test_basic_auth(opener, auth_handler, "Authorization",
- realm, http_handler, password_manager,
- "http://acme.example.com/protected",
- "http://acme.example.com/protected",
- )
-
- def test_basic_auth_with_single_quoted_realm(self):
- self.test_basic_auth(quote_char="'")
-
- def test_basic_auth_with_unquoted_realm(self):
- opener = OpenerDirector()
- password_manager = MockPasswordManager()
- auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
- realm = "ACME Widget Store"
- http_handler = MockHTTPHandler(
- 401, 'WWW-Authenticate: Basic realm=%s\r\n\r\n' % realm)
- opener.add_handler(auth_handler)
- opener.add_handler(http_handler)
- with self.assertWarns(UserWarning):
+ def check_basic_auth(self, headers, realm):
+ with self.subTest(realm=realm, headers=headers):
+ opener = OpenerDirector()
+ password_manager = MockPasswordManager()
+ auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
+ body = '\r\n'.join(headers) + '\r\n\r\n'
+ http_handler = MockHTTPHandler(401, body)
+ opener.add_handler(auth_handler)
+ opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Authorization",
- realm, http_handler, password_manager,
- "http://acme.example.com/protected",
- "http://acme.example.com/protected",
- )
+ realm, http_handler, password_manager,
+ "http://acme.example.com/protected",
+ "http://acme.example.com/protected")
+
+ def test_basic_auth(self):
+ realm = "realm2@example.com"
+ realm2 = "realm2@example.com"
+ basic = f'Basic realm="{realm}"'
+ basic2 = f'Basic realm="{realm2}"'
+ other_no_realm = 'Otherscheme xxx'
+ digest = (f'Digest realm="{realm2}", '
+ f'qop="auth, auth-int", '
+ f'nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", '
+ f'opaque="5ccc069c403ebaf9f0171e9517f40e41"')
+ for realm_str in (
+ # test "quote" and 'quote'
+ f'Basic realm="{realm}"',
+ f"Basic realm='{realm}'",
+
+ # charset is ignored
+ f'Basic realm="{realm}", charset="UTF-8"',
+
+ # Multiple challenges per header
+ f'{basic}, {basic2}',
+ f'{basic}, {other_no_realm}',
+ f'{other_no_realm}, {basic}',
+ f'{basic}, {digest}',
+ f'{digest}, {basic}',
+ ):
+ headers = [f'WWW-Authenticate: {realm_str}']
+ self.check_basic_auth(headers, realm)
+
+ # no quote: expect a warning
+ with support.check_warnings(("Basic Auth Realm was unquoted",
+ UserWarning)):
+ headers = [f'WWW-Authenticate: Basic realm={realm}']
+ self.check_basic_auth(headers, realm)
+
+ # Multiple headers: one challenge per header.
+ # Use the first Basic realm.
+ for challenges in (
+ [basic, basic2],
+ [basic, digest],
+ [digest, basic],
+ ):
+ headers = [f'WWW-Authenticate: {challenge}'
+ for challenge in challenges]
+ self.check_basic_auth(headers, realm)
def test_proxy_basic_auth(self):
opener = OpenerDirector()
self.assertEqual(err, "".encode())
+ @unittest.skipUnless(sys.platform == 'darwin', 'only relevant on macOS')
+ def test_macos_env(self):
+ rmtree(self.env_dir)
+ builder = venv.EnvBuilder()
+ builder.create(self.env_dir)
+
+ envpy = os.path.join(os.path.realpath(self.env_dir),
+ self.bindir, self.exe)
+ out, err = check_output([envpy, '-c',
+ 'import os; print("__PYVENV_LAUNCHER__" in os.environ)'])
+ self.assertEqual(out.strip(), 'False'.encode())
+
@requireVenvCreate
class EnsurePipTest(BaseTest):
"""Test venv module installation of pip."""
# executing pip with sudo, you may want sudo's -H flag."
# where $HOME is replaced by the HOME environment variable.
err = re.sub("^(WARNING: )?The directory .* or its parent directory "
- "is not owned by the current user .*$", "",
+ "is not owned or is not writable by the current user.*$", "",
err, flags=re.MULTILINE)
self.assertEqual(err.rstrip(), "")
# Being fairly specific regarding the expected behaviour for the
elem.tail = X()
elem.__setstate__({'tag': 42}) # shouldn't cause an assertion failure
+ @support.cpython_only
+ def test_uninitialized_parser(self):
+ # The interpreter shouldn't crash in case of calling methods or
+ # accessing attributes of uninitialized XMLParser objects.
+ parser = cET.XMLParser.__new__(cET.XMLParser)
+ self.assertRaises(ValueError, parser.close)
+ self.assertRaises(ValueError, parser.feed, 'foo')
+ class MockFile:
+ def read(*args):
+ return ''
+ self.assertRaises(ValueError, parser._parse_whole, MockFile())
+ self.assertRaises(ValueError, parser._setevents, None)
+ with self.assertRaises(ValueError):
+ parser.entity
+ with self.assertRaises(ValueError):
+ parser.target
+
def test_setstate_leaks(self):
# Test reference leaks
elem = cET.Element.__new__(cET.Element)
# fork() only copied the current thread; clear references to others.
new_active = {}
- current = current_thread()
+
+ try:
+ current = _active[get_ident()]
+ except KeyError:
+ # fork() was called in a thread which was not spawned
+ # by threading.Thread. For example, a thread spawned
+ # by thread.start_new_thread().
+ current = _MainThread()
+
_main_thread = current
# reset _shutdown() locks: threads re-register their _tstate_lock below
treated similarly.
If -n is not given, a suitable number of loops is calculated by trying
-successive powers of 10 until the total time is at least 0.2 seconds.
+increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the
+total time is at least 0.2 seconds.
Note: there is a certain baseline overhead associated with executing a
pass statement. It differs between versions. The code here doesn't try
The return value is a generator of strings, each ending in a newline.
Normally, the generator emits a single string; however, for
- SyntaxError exceptions, it emites several lines that (when
+ SyntaxError exceptions, it emits several lines that (when
printed) display detailed information about where the syntax
error occurred.
self.stop_btn.config(state=stop,
bg="#d00" if stop == NORMAL else "#fca")
self.clear_btn.config(state=clear,
- bg="#d00" if clear == NORMAL else"#fca")
+ bg="#d00" if clear == NORMAL else "#fca")
self.output_lbl.config(text=txt, fg=color)
def makeLoadDemoMenu(self, master):
self.__bound__ = _type_check(bound, "Bound must be a type.")
else:
self.__bound__ = None
- def_mod = sys._getframe(1).f_globals['__name__'] # for pickling
+ try:
+ def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') # for pickling
+ except (AttributeError, ValueError):
+ def_mod = None
if def_mod != 'typing':
self.__module__ = def_mod
def __enter__(self):
# The __warningregistry__'s need to be in a pristine state for tests
# to work properly.
- for v in sys.modules.values():
+ for v in list(sys.modules.values()):
if getattr(v, '__warningregistry__', None):
v.__warningregistry__ = {}
self.warnings_manager = warnings.catch_warnings(record=True)
import pprint
import sys
import builtins
+import contextlib
from types import ModuleType, MethodType
from functools import wraps, partial
@wraps(func)
def patched(*args, **keywargs):
extra_args = []
- entered_patchers = []
-
- exc_info = tuple()
- try:
+ with contextlib.ExitStack() as exit_stack:
for patching in patched.patchings:
- arg = patching.__enter__()
- entered_patchers.append(patching)
+ arg = exit_stack.enter_context(patching)
if patching.attribute_name is not None:
keywargs.update(arg)
elif patching.new is DEFAULT:
args += tuple(extra_args)
return func(*args, **keywargs)
- except:
- if (patching not in entered_patchers and
- _is_started(patching)):
- # the patcher may have been started, but an exception
- # raised whilst entering one of its additional_patchers
- entered_patchers.append(patching)
- # Pass the exception to __exit__
- exc_info = sys.exc_info()
- # re-raise the exception
- raise
- finally:
- for patching in reversed(entered_patchers):
- patching.__exit__(*exc_info)
patched.patchings = [self]
return patched
self.temp_original = original
self.is_local = local
- setattr(self.target, self.attribute, new_attr)
- if self.attribute_name is not None:
- extra_args = {}
- if self.new is DEFAULT:
- extra_args[self.attribute_name] = new
- for patching in self.additional_patchers:
- arg = patching.__enter__()
- if patching.new is DEFAULT:
- extra_args.update(arg)
- return extra_args
-
- return new
-
+ self._exit_stack = contextlib.ExitStack()
+ try:
+ setattr(self.target, self.attribute, new_attr)
+ if self.attribute_name is not None:
+ extra_args = {}
+ if self.new is DEFAULT:
+ extra_args[self.attribute_name] = new
+ for patching in self.additional_patchers:
+ arg = self._exit_stack.enter_context(patching)
+ if patching.new is DEFAULT:
+ extra_args.update(arg)
+ return extra_args
+
+ return new
+ except:
+ if not self.__exit__(*sys.exc_info()):
+ raise
def __exit__(self, *exc_info):
"""Undo the patch."""
del self.temp_original
del self.is_local
del self.target
- for patcher in reversed(self.additional_patchers):
- if _is_started(patcher):
- patcher.__exit__(*exc_info)
+ exit_stack = self._exit_stack
+ del self._exit_stack
+ return exit_stack.__exit__(*exc_info)
def start(self):
# If the patch hasn't been started this will fail
pass
- return self.__exit__()
+ return self.__exit__(None, None, None)
import warnings
import weakref
import inspect
+import types
from copy import deepcopy
from test import support
pass
self.assertRaises(TypeError, self.assertWarnsRegex, MyWarn, lambda: True)
+ def testAssertWarnsModifySysModules(self):
+ # bpo-29620: handle modified sys.modules during iteration
+ class Foo(types.ModuleType):
+ @property
+ def __warningregistry__(self):
+ sys.modules['@bar@'] = 'bar'
+
+ sys.modules['@foo@'] = Foo('foo')
+ try:
+ self.assertWarns(UserWarning, warnings.warn, 'expected')
+ finally:
+ del sys.modules['@foo@']
+ del sys.modules['@bar@']
+
def testAssertRaisesRegexMismatch(self):
def Stub():
raise Exception('Unexpected')
# allow for double- and single-quoted realm values
# (single quotes are a violation of the RFC, but appear in the wild)
- rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
- 'realm=(["\']?)([^"\']*)\\2', re.I)
+ rx = re.compile('(?:^|,)' # start of the string or ','
+ '[ \t]*' # optional whitespaces
+ '([^ \t]+)' # scheme like "Basic"
+ '[ \t]+' # mandatory whitespaces
+ # realm=xxx
+ # realm='xxx'
+ # realm="xxx"
+ 'realm=(["\']?)([^"\']*)\\2',
+ re.I)
# XXX could pre-emptively send auth info already accepted (RFC 2617,
# end of section 2, and section 1.2 immediately after "credentials"
self.passwd = password_mgr
self.add_password = self.passwd.add_password
+ def _parse_realm(self, header):
+ # parse WWW-Authenticate header: accept multiple challenges per header
+ found_challenge = False
+ for mo in AbstractBasicAuthHandler.rx.finditer(header):
+ scheme, quote, realm = mo.groups()
+ if quote not in ['"', "'"]:
+ warnings.warn("Basic Auth Realm was unquoted",
+ UserWarning, 3)
+
+ yield (scheme, realm)
+
+ found_challenge = True
+
+ if not found_challenge:
+ if header:
+ scheme = header.split()[0]
+ else:
+ scheme = ''
+ yield (scheme, None)
+
def http_error_auth_reqed(self, authreq, host, req, headers):
# host may be an authority (without userinfo) or a URL with an
# authority
- # XXX could be multiple headers
- authreq = headers.get(authreq, None)
+ headers = headers.get_all(authreq)
+ if not headers:
+ # no header found
+ return
- if authreq:
- scheme = authreq.split()[0]
- if scheme.lower() != 'basic':
- raise ValueError("AbstractBasicAuthHandler does not"
- " support the following scheme: '%s'" %
- scheme)
- else:
- mo = AbstractBasicAuthHandler.rx.search(authreq)
- if mo:
- scheme, quote, realm = mo.groups()
- if quote not in ['"',"'"]:
- warnings.warn("Basic Auth Realm was unquoted",
- UserWarning, 2)
- if scheme.lower() == 'basic':
- return self.retry_http_basic_auth(host, req, realm)
+ unsupported = None
+ for header in headers:
+ for scheme, realm in self._parse_realm(header):
+ if scheme.lower() != 'basic':
+ unsupported = scheme
+ continue
+
+ if realm is not None:
+ # Use the first matching Basic challenge.
+ # Ignore following challenges even if they use the Basic
+ # scheme.
+ return self.retry_http_basic_auth(host, req, realm)
+
+ if unsupported is not None:
+ raise ValueError("AbstractBasicAuthHandler does not "
+ "support the following scheme: %r"
+ % (scheme,))
def retry_http_basic_auth(self, host, req, realm):
user, pw = self.passwd.find_user_password(realm, host)
# instead of "from webbrowser import *".
def open(url, new=0, autoraise=True):
+ """Display url using the default browser.
+
+ If possible, open url in a location determined by new.
+ - 0: the same browser window (the default).
+ - 1: a new browser window.
+ - 2: a new browser page ("tab").
+ If possible, autoraise raises the window (the default) or not.
+ """
if _tryorder is None:
with _lock:
if _tryorder is None:
return False
def open_new(url):
+ """Open url in a new window of the default browser.
+
+ If not possible, then open url in the only browser window.
+ """
return open(url, 1)
def open_new_tab(url):
+ """Open url in a new page ("tab") of the default browser.
+
+ If not possible, then the behavior becomes equivalent to open_new().
+ """
return open(url, 2)
register(browser, None, BackgroundBrowser(browser))
else:
# Prefer X browsers if present
- if os.environ.get("DISPLAY"):
+ if os.environ.get("DISPLAY") or os.environ.get("WAYLAND_DISPLAY"):
try:
cmd = "xdg-settings get default-web-browser".split()
raw_result = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
$DESTROOT, massages that installation to remove .pyc files and such, creates
an Installer package from the installation plus other files in ``resources``
and ``scripts`` and placed that on a ``.dmg`` disk image.
-
-For Python 3.4.0, PSF practice is to build two installer variants
-for each release.
-
-1. 32-bit-only, i386 and PPC universal, capable on running on all machines
- supported by Mac OS X 10.5 through (at least) 10.9::
-
- /path/to/bootstrap/python2.7 build-installer.py \
- --sdk-path=/Developer/SDKs/MacOSX10.5.sdk \
- --universal-archs=32-bit \
- --dep-target=10.5
-
- - builds the following third-party libraries
-
- * NCurses 5.9 (http://bugs.python.org/issue15037)
- * SQLite 3.8.11
- * XZ 5.0.5
-
- - uses system-supplied versions of third-party libraries
-
- * readline module links with Apple BSD editline (libedit)
-
- - requires ActiveState ``Tcl/Tk 8.4`` (currently 8.4.20) to be installed for building
-
- - recommended build environment:
-
- * Mac OS X 10.5.8 Intel or PPC
- * Xcode 3.1.4
- * ``MacOSX10.5`` SDK
- * ``MACOSX_DEPLOYMENT_TARGET=10.5``
- * Apple ``gcc-4.2``
- * bootstrap non-framework Python 2.7 for documentation build with
- Sphinx (as of 3.4.1)
-
- - alternate build environments:
-
- * Mac OS X 10.6.8 with Xcode 3.2.6
- - need to change ``/System/Library/Frameworks/{Tcl,Tk}.framework/Version/Current`` to ``8.4``
- * Note Xcode 4.* does not support building for PPC so cannot be used for this build
-
-2. 64-bit / 32-bit, x86_64 and i386 universal, for OS X 10.6 (and later)::
+The installer package built on the dmg is a macOS bundle format installer
+package. This format is deprecated and is no longer supported by modern
+macOS systems; it is usable on macOS 10.6 and earlier systems.
+To be usable on newer versions of macOS, the bits in the bundle package
+must be assembled in a macOS flat installer package, using current
+versions of the pkgbuild and productbuild utilities. To pass macOS
+Gatekeeper download quarantine, the final package must be signed
+with a valid Apple Developer ID certificate using productsign.
+Starting with macOS 10.15 Catalina, Gatekeeper now also requires
+that installer packages are submitted to and pass Apple's automated
+notarization service using the altool command. To pass notarization,
+the binaries included in the package must be built with at least
+the macOS 10.9 SDK, must now be signed with the codesign utility
+and executables must opt in to the hardened run time option with
+any necessary entitlements. Details of these processes are
+available in the on-line Apple Developer Documentation and man pages.
+
+As of 3.8.0 and 3.7.7, PSF practice is to build one installer variant
+for each release. Note that as of this writing, no Pythons support
+building on a newer version of macOS that will run on older versions
+by setting MACOSX_DEPLOYMENT_TARGET. This is because the various
+Python C modules do not yet support runtime testing of macOS
+feature availability (for example, by using macOS AvailabilityMacros.h
+and weak-linking). To build a Python that is to be used on a
+range of macOS releases, always build on the oldest release to be
+supported; the necessary shared libraries for that release will
+normally also be available on later systems, with the occasional
+exception such as the removal of 32-bit libraries in macOS 10.15.
+
+build-installer requires Apple Developer tools, either from the
+Command Line Tools package or from a full Xcode installation.
+You should use the most recent version of either for the operating
+system version in use. (One notable exception: on macOS 10.6,
+Snow Leopard, use Xcode 3, not Xcode 4 which was released later
+in the 10.6 support cycle.)
+
+1. 64-bit, x86_64, for OS X 10.9 (and later)::
/path/to/bootstrap/python2.7 build-installer.py \
- --sdk-path=/Developer/SDKs/MacOSX10.6.sdk \
- --universal-archs=intel \
- --dep-target=10.6
+ --universal-archs=intel-64 \
+ --dep-target=10.9
- builds the following third-party libraries
- * NCurses 5.9 (http://bugs.python.org/issue15037)
- * SQLite 3.8.11
- * XZ 5.0.5
+ * OpenSSL 1.1.1
+ * Tcl/Tk 8.6
+ * NCurses
+ * SQLite
+ * XZ
+ * libffi
- uses system-supplied versions of third-party libraries
* readline module links with Apple BSD editline (libedit)
-
- - requires ActiveState Tcl/Tk 8.5.15.1 (or later) to be installed for building
-
- - recommended build environment:
-
- * Mac OS X 10.6.8 (or later)
- * Xcode 3.2.6
- * ``MacOSX10.6`` SDK
- * ``MACOSX_DEPLOYMENT_TARGET=10.6``
- * Apple ``gcc-4.2``
- * bootstrap non-framework Python 2.7 for documentation build with
- Sphinx (as of 3.4.1)
-
- - alternate build environments:
-
- * none. Xcode 4.x currently supplies two C compilers.
- ``llvm-gcc-4.2.1`` has been found to miscompile Python 3.3.x and
- produce a non-functional Python executable. As it appears to be
- considered a migration aid by Apple and is not likely to be fixed,
- its use should be avoided. The other compiler, ``clang``, has been
- undergoing rapid development. While it appears to have become
- production-ready in the most recent Xcode 5 releases, the versions
- available on the deprecated Xcode 4.x for 10.6 were early releases
- and did not receive the level of exposure in production environments
- that the Xcode 3 gcc-4.2 compiler has had.
-
-
-* For Python 2.7.x and 3.2.x, the 32-bit-only installer was configured to
- support Mac OS X 10.3.9 through (at least) 10.6. Because it is
- believed that there are few systems still running OS X 10.3 or 10.4
- and because it has become increasingly difficult to test and
- support the differences in these earlier systems, as of Python 3.3.0 the PSF
- 32-bit installer no longer supports them. For reference in building such
- an installer yourself, the details are::
-
- /usr/bin/python build-installer.py \
- --sdk-path=/Developer/SDKs/MacOSX10.4u.sdk \
- --universal-archs=32-bit \
- --dep-target=10.3
-
- - builds the following third-party libraries
-
- * Bzip2
- * NCurses
- * GNU Readline (GPL)
- * SQLite 3
- * XZ
- * Zlib 1.2.3
- * Oracle Sleepycat DB 4.8 (Python 2.x only)
-
- - requires ActiveState ``Tcl/Tk 8.4`` (currently 8.4.20) to be installed for building
+ * zlib
+ * bz2
- recommended build environment:
- * Mac OS X 10.5.8 PPC or Intel
- * Xcode 3.1.4 (or later)
- * ``MacOSX10.4u`` SDK (later SDKs do not support PPC G3 processors)
- * ``MACOSX_DEPLOYMENT_TARGET=10.3``
- * Apple ``gcc-4.0``
- * system Python 2.5 for documentation build with Sphinx
-
- - alternate build environments:
-
- * Mac OS X 10.6.8 with Xcode 3.2.6
- - need to change ``/System/Library/Frameworks/{Tcl,Tk}.framework/Version/Current`` to ``8.4``
-
+ * Mac OS X 10.9.5
+ * Xcode Command Line Tools 6.2
+ * ``MacOSX10.9`` SDK
+ * ``MACOSX_DEPLOYMENT_TARGET=10.9``
+ * Apple ``clang``
General Prerequisites
---------------------
-* No Fink (in ``/sw``) or MacPorts (in ``/opt/local``) or other local
- libraries or utilities (in ``/usr/local``) as they could
+* No Fink (in ``/sw``) or MacPorts (in ``/opt/local``) or Homebrew or
+ other local libraries or utilities (in ``/usr/local``) as they could
interfere with the build.
-* The documentation for the release is built using Sphinx
- because it is included in the installer. For 2.7.x and 3.x.x up to and
- including 3.4.0, the ``Doc/Makefile`` uses ``svn`` to download repos of
- ``Sphinx`` and its dependencies. Beginning with 3.4.1, the ``Doc/Makefile``
- assumes there is an externally-provided ``sphinx-build`` and requires at
- least Python 2.6 to run. Because of this, it is no longer possible to
- build a 3.4.1 or later installer on OS X 10.5 using the Apple-supplied
- Python 2.5.
-
* It is safest to start each variant build with an empty source directory
- populated with a fresh copy of the untarred source.
+ populated with a fresh copy of the untarred source or a source repo.
* It is recommended that you remove any existing installed version of the
Python being built::
sudo rm -rf /Library/Frameworks/Python.framework/Versions/n.n
-
-The Recipe
-----------
-
-Here are the steps you need to follow to build a Python installer:
-
-* Run ``build-installer.py``. Optionally you can pass a number of arguments
- to specify locations of various files. Please see the top of
- ``build-installer.py`` for its usage.
-
- Running this script takes some time, it will not only build Python itself
- but also some 3th-party libraries that are needed for extensions.
-
-* When done the script will tell you where the DMG image is (by default
- somewhere in ``/tmp/_py``).
-
-Building other universal installers
-...................................
-
-It is also possible to build a 4-way universal installer that runs on
-OS X 10.5 Leopard or later::
-
- /usr/bin/python /build-installer.py \
- --dep-target=10.5
- --universal-archs=all
- --sdk-path=/Developer/SDKs/MacOSX10.5.sdk
-
-This requires that the deployment target is 10.5, and hence
-also that you are building on at least OS X 10.5. 4-way includes
-``i386``, ``x86_64``, ``ppc``, and ``ppc64`` (G5). ``ppc64`` executable
-variants can only be run on G5 machines running 10.5. Note that,
-while OS X 10.6 is only supported on Intel-based machines, it is possible
-to run ``ppc`` (32-bit) executables unmodified thanks to the Rosetta ppc
-emulation in OS X 10.5 and 10.6. The 4-way installer variant must be
-built with Xcode 3. It is not regularly built or tested.
-
-Other ``--universal-archs`` options are ``64-bit`` (``x86_64``, ``ppc64``),
-and ``3-way`` (``ppc``, ``i386``, ``x86_64``). None of these options
-are regularly exercised; use at your own risk.
-
-
-Testing
--------
-
-Ideally, the resulting binaries should be installed and the test suite run
-on all supported OS X releases and architectures. As a practical matter,
-that is generally not possible. At a minimum, variant 1 should be run on
-a PPC G4 system with OS X 10.5 and at least one Intel system running OS X
-10.9, 10.8, 10.7, 10.6, or 10.5. Variant 2 should be run on 10.9, 10.8,
-10.7, and 10.6 systems in both 32-bit and 64-bit modes.::
-
- /usr/local/bin/pythonn.n -m test -w -u all,-largefile
- /usr/local/bin/pythonn.n-32 -m test -w -u all
-
-Certain tests will be skipped and some cause the interpreter to fail
-which will likely generate ``Python quit unexpectedly`` alert messages
-to be generated at several points during a test run. These are normal
-during testing and can be ignored.
-
-It is also recommend to launch IDLE and verify that it is at least
-functional. Double-click on the IDLE app icon in ``/Applications/Python n.n``.
-It should also be tested from the command line::
-
- /usr/local/bin/idlen.n
-
"""
This script is used to build "official" universal installers on macOS.
+NEW for 3.9.0 and backports:
+- 2.7 end-of-life issues:
+ - Python 3 installs now update the Current version link
+ in /Library/Frameworks/Python.framework/Versions
+- fully support running under Python 3 as well as 2.7
+- support building on newer macOS systems with SIP
+- fully support building on macOS 10.9+
+- support 10.6+ on best effort
+- support bypassing docs build by supplying a prebuilt
+ docs html tarball in the third-party source library,
+  in the format and filename convention of those
+ downloadable from python.org:
+ python-3.x.y-docs-html.tar.bz2
+
NEW for 3.7.0:
- support Intel 64-bit-only () and 32-bit-only installer builds
- build and use internal Tcl/Tk 8.6 for 10.6+ builds
- use generic "gcc" as compiler (CC env var) rather than "gcc-4.2"
TODO:
-- support SDKROOT and DEVELOPER_DIR xcrun env variables
-- test with 10.5 and 10.4 and determine support status
-
-Please ensure that this script keeps working with Python 2.5, to avoid
-bootstrap issues (/usr/bin/python is Python 2.5 on OSX 10.5). Doc builds
-use current versions of Sphinx and require a reasonably current python3.
-Sphinx and dependencies are installed into a venv using the python3's pip
-so will fetch them from PyPI if necessary. Since python3 is now used for
-Sphinx, build-installer.py should also be converted to use python3!
-
-For 3.7.0, when building for a 10.6 or higher deployment target,
-build-installer builds and links with its own copy of Tcl/Tk 8.6.
-Otherwise, it requires an installed third-party version of
-Tcl/Tk 8.4 (for OS X 10.4 and 10.5 deployment targets), Tcl/TK 8.5
-(for 10.6 or later), or Tcl/TK 8.6 (for 10.9 or later)
-installed in /Library/Frameworks. When installed,
-the Python built by this script will attempt to dynamically link first to
-Tcl and Tk frameworks in /Library/Frameworks if available otherwise fall
-back to the ones in /System/Library/Framework. For the build, we recommend
-installing the most recent ActiveTcl 8.6. 8.5, or 8.4 version, depending
-on the deployment target. The actual version linked to depends on the
-path of /Library/Frameworks/{Tcl,Tk}.framework/Versions/Current.
+- test building with SDKROOT and DEVELOPER_DIR xcrun env variables
Usage: see USAGE variable in the script.
"""
INCLUDE_TIMESTAMP = 1
VERBOSE = 1
-from plistlib import Plist
+RUNNING_ON_PYTHON2 = sys.version_info.major == 2
-try:
+if RUNNING_ON_PYTHON2:
from plistlib import writePlist
-except ImportError:
- # We're run using python2.3
- def writePlist(plist, path):
- plist.write(path)
+else:
+ from plistlib import dump
+ def writePlist(path, plist):
+ with open(plist, 'wb') as fp:
+ dump(path, fp)
def shellQuote(value):
"""
result.extend([
dict(
- name="OpenSSL 1.1.1d",
- url="https://www.openssl.org/source/openssl-1.1.1d.tar.gz",
- checksum='3be209000dbc7e1b95bcdf47980a3baa',
+ name="OpenSSL 1.1.1g",
+ url="https://www.openssl.org/source/openssl-1.1.1g.tar.gz",
+ checksum='76766e98997660138cdaf13a187bd234',
buildrecipe=build_universal_openssl,
configure=None,
install=None,
"--libdir=/Library/Frameworks/Python.framework/Versions/%s/lib"%(getVersion(),),
],
patchscripts=[
- ("ftp://invisible-island.net/ncurses//5.9/ncurses-5.9-20120616-patch.sh.bz2",
+ ("ftp://ftp.invisible-island.net/ncurses//5.9/ncurses-5.9-20120616-patch.sh.bz2",
"f54bf02a349f96a7c4f0d00922f3a0d4"),
],
useLDFlags=False,
),
),
dict(
- name="SQLite 3.31.1",
- url="https://sqlite.org/2020/sqlite-autoconf-3310100.tar.gz",
- checksum='2d0a553534c521504e3ac3ad3b90f125',
+ name="SQLite 3.32.2",
+ url="https://sqlite.org/2020/sqlite-autoconf-3320200.tar.gz",
+ checksum='eb498918a33159cdf8104997aad29e83',
extra_cflags=('-Os '
'-DSQLITE_ENABLE_FTS5 '
'-DSQLITE_ENABLE_FTS4 '
curDir = os.getcwd()
os.chdir(buildDir)
runCommand('make clean')
- # Create virtual environment for docs builds with blurb and sphinx
- runCommand('make venv')
- runCommand('make html PYTHON=venv/bin/python')
+
+ # Search third-party source directory for a pre-built version of the docs.
+ # Use the naming convention of the docs.python.org html downloads:
+ # python-3.9.0b1-docs-html.tar.bz2
+ doctarfiles = [ f for f in os.listdir(DEPSRC)
+ if f.startswith('python-'+getFullVersion())
+ if f.endswith('-docs-html.tar.bz2') ]
+ if doctarfiles:
+ doctarfile = doctarfiles[0]
+ if not os.path.exists('build'):
+ os.mkdir('build')
+ # if build directory existed, it was emptied by make clean, above
+ os.chdir('build')
+ # Extract the first archive found for this version into build
+ runCommand('tar xjf %s'%shellQuote(os.path.join(DEPSRC, doctarfile)))
+ # see if tar extracted a directory ending in -docs-html
+ archivefiles = [ f for f in os.listdir('.')
+ if f.endswith('-docs-html')
+ if os.path.isdir(f) ]
+ if archivefiles:
+ archivefile = archivefiles[0]
+ # make it our 'Docs/build/html' directory
+ print(' -- using pre-built python documentation from %s'%archivefile)
+ os.rename(archivefile, 'html')
+ os.chdir(buildDir)
+
+ htmlDir = os.path.join('build', 'html')
+ if not os.path.exists(htmlDir):
+ # Create virtual environment for docs builds with blurb and sphinx
+ runCommand('make venv')
+ runCommand('venv/bin/python3 -m pip install -U Sphinx==2.3.1')
+ runCommand('make html PYTHON=venv/bin/python')
+ os.rename(htmlDir, docdir)
os.chdir(curDir)
- if not os.path.exists(docdir):
- os.mkdir(docdir)
- os.rename(os.path.join(buildDir, 'build', 'html'), docdir)
def buildPython():
# Since the extra libs are not in their installed framework location
# during the build, augment the library path so that the interpreter
# will find them during its extension import sanity checks.
- os.environ['DYLD_LIBRARY_PATH'] = os.path.join(WORKDIR,
- 'libraries', 'usr', 'local', 'lib')
+
print("Running configure...")
runCommand("%s -C --enable-framework --enable-universalsdk=/ "
"--with-universal-archs=%s "
"%s "
"%s "
"%s "
+ "%s "
"LDFLAGS='-g -L%s/libraries/usr/local/lib' "
"CFLAGS='-g -I%s/libraries/usr/local/include' 2>&1"%(
shellQuote(os.path.join(SRCDIR, 'configure')),
UNIVERSALARCHS,
(' ', '--with-computed-gotos ')[PYTHON_3],
(' ', '--without-ensurepip ')[PYTHON_3],
+ (' ', "--with-openssl='%s/libraries/usr/local'"%(
+ shellQuote(WORKDIR)[1:-1],))[PYTHON_3],
(' ', "--with-tcltk-includes='-I%s/libraries/usr/local/include'"%(
shellQuote(WORKDIR)[1:-1],))[internalTk()],
(' ', "--with-tcltk-libs='-L%s/libraries/usr/local/lib -ltcl8.6 -ltk8.6'"%(
shellQuote(WORKDIR)[1:-1],
shellQuote(WORKDIR)[1:-1]))
+ # As of macOS 10.11 with SYSTEM INTEGRITY PROTECTION, DYLD_*
+ # environment variables are no longer automatically inherited
+ # by child processes from their parents. We used to just set
+ # DYLD_LIBRARY_PATH, pointing to the third-party libs,
+ # in build-installer.py's process environment and it was
+ # passed through the make utility into the environment of
+ # setup.py. Instead, we now append DYLD_LIBRARY_PATH to
+ # the existing RUNSHARED configuration value when we call
+ # make for extension module builds.
+
+ runshared_for_make = "".join([
+ " RUNSHARED=",
+ "'",
+ grepValue("Makefile", "RUNSHARED"),
+ ' DYLD_LIBRARY_PATH=',
+ os.path.join(WORKDIR, 'libraries', 'usr', 'local', 'lib'),
+ "'" ])
+
# Look for environment value BUILDINSTALLER_BUILDPYTHON_MAKE_EXTRAS
# and, if defined, append its value to the make command. This allows
# us to pass in version control tags, like GITTAG, to a build from a
make_extras = os.getenv("BUILDINSTALLER_BUILDPYTHON_MAKE_EXTRAS")
if make_extras:
- make_cmd = "make " + make_extras
+ make_cmd = "make " + make_extras + runshared_for_make
else:
- make_cmd = "make"
+ make_cmd = "make" + runshared_for_make
print("Running " + make_cmd)
runCommand(make_cmd)
- print("Running make install")
- runCommand("make install DESTDIR=%s"%(
- shellQuote(rootDir)))
+ make_cmd = "make install DESTDIR=%s %s"%(
+ shellQuote(rootDir),
+ runshared_for_make)
+ print("Running " + make_cmd)
+ runCommand(make_cmd)
- print("Running make frameworkinstallextras")
- runCommand("make frameworkinstallextras DESTDIR=%s"%(
- shellQuote(rootDir)))
+ make_cmd = "make frameworkinstallextras DESTDIR=%s %s"%(
+ shellQuote(rootDir),
+ runshared_for_make)
+ print("Running " + make_cmd)
+ runCommand(make_cmd)
- del os.environ['DYLD_LIBRARY_PATH']
print("Copying required shared libraries")
if os.path.exists(os.path.join(WORKDIR, 'libraries', 'Library')):
build_lib_dir = os.path.join(
if ln.startswith('VERSION='):
VERSION=ln.split()[1]
if ln.startswith('ABIFLAGS='):
- ABIFLAGS=ln.split()[1]
+ ABIFLAGS=ln.split()
+ ABIFLAGS=ABIFLAGS[1] if len(ABIFLAGS) > 1 else ''
if ln.startswith('LDVERSION='):
LDVERSION=ln.split()[1]
fp.close()
import pprint
if getVersionMajorMinor() >= (3, 6):
# XXX this is extra-fragile
- path = os.path.join(path_to_lib, '_sysconfigdata_m_darwin_darwin.py')
+ path = os.path.join(path_to_lib,
+ '_sysconfigdata_%s_darwin_darwin.py' % (ABIFLAGS,))
else:
path = os.path.join(path_to_lib, '_sysconfigdata.py')
fp = open(path, 'r')
data = fp.read()
fp.close()
# create build_time_vars dict
- exec(data)
+ if RUNNING_ON_PYTHON2:
+ exec(data)
+ else:
+ g_dict = {}
+ l_dict = {}
+ exec(data, g_dict, l_dict)
+ build_time_vars = l_dict['build_time_vars']
vars = {}
for k, v in build_time_vars.items():
if type(v) == type(''):
os.chdir(curdir)
- if PYTHON_3:
- # Remove the 'Current' link, that way we don't accidentally mess
- # with an already installed version of python 2
- os.unlink(os.path.join(rootDir, 'Library', 'Frameworks',
- 'Python.framework', 'Versions', 'Current'))
-
def patchFile(inPath, outPath):
data = fileContents(inPath)
data = data.replace('$FULL_VERSION', getFullVersion())
vers = getFullVersion()
major, minor = getVersionMajorMinor()
- pl = Plist(
+ pl = dict(
CFBundleGetInfoString="Python.%s %s"%(pkgname, vers,),
CFBundleIdentifier='org.python.Python.%s'%(pkgname,),
CFBundleName='Python.%s'%(pkgname,),
)
writePlist(pl, os.path.join(packageContents, 'Info.plist'))
- pl = Plist(
+ pl = dict(
IFPkgDescriptionDescription=readme,
IFPkgDescriptionTitle=recipe.get('long_name', "Python.%s"%(pkgname,)),
IFPkgDescriptionVersion=vers,
vers = getFullVersion()
major, minor = getVersionMajorMinor()
- pl = Plist(
+ pl = dict(
CFBundleGetInfoString="Python %s"%(vers,),
CFBundleIdentifier='org.python.Python',
CFBundleName='Python',
os.mkdir(rsrcDir)
makeMpkgPlist(os.path.join(pkgroot, 'Info.plist'))
- pl = Plist(
+ pl = dict(
IFPkgDescriptionTitle="Python",
IFPkgDescriptionVersion=getVersion(),
)
-{\rtf1\ansi\ansicpg1252\cocoartf2511
+{\rtf1\ansi\ansicpg1252\cocoartf2513
\cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fswiss\fcharset0 Helvetica-Oblique;
\f3\fmodern\fcharset0 CourierNewPSMT;}
{\colortbl;\red255\green255\blue255;}
\
\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0
-\f1\b \cf0 \ul \ulc0 Which installer variant should I use?
+\f1\b \cf0 \ul Which installer variant should I use?
\f0\b0 \ulnone \
\
\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\partightenfactor0
\f0\b0 variant is no longer provided. \
\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0
-\f1\b \cf0 \ul \ulc0 \
+\f1\b \cf0 \ul \
Using IDLE or other Tk applications
\f0\b0 \ulnone \
\
\f0\b0 file dialog windows. Click on the
\f1\b OK
\f0\b0 button to proceed.\
+
+\f1\b \ul \
+Python 2.7 end-of-life [changed in 3.7.8]\
+\
\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0
-\f1\b \cf0 \ul \ulc0 \
+\f0\b0 \cf0 \ulnone Python 2.7 has now reached end-of-life. As of Python 3.7.8, the
+\f3 Python Launcher
+\f0 app now has
+\f3 python3
+\f0 factory defaults. Also, the
+\f3 Current
+\f0 link in the
+\f3 /Library/Frameworks/Python.framework/Versions
+\f0 directory is now updated to point to the Python 3 being installed; previously, only Python 2 installs updated
+\f3 Current
+\f0 . This change might affect developers using the framework to embed Python in their applications. If another version is desired for embedding, the
+\f3 Current
+\f0 symlink can be changed manually without affecting 3.7.x behavior.\
+\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0
+
+\f1\b \cf0 \ul \
Other changes\
\
+\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0
-\f0\b0 \ulnone For other changes in this release, see the
+\f0\b0 \cf0 \ulnone For other changes in this release, see the
\f2\i What's new
\f0\i0 section in the {\field{\*\fldinst{HYPERLINK "https://www.python.org/doc/"}}{\fldrslt Documentation Set}} for this release and its
\f2\i Release Notes
# make link in /Applications/Python m.n/ for Finder users
if [ -d "${APPDIR}" ]; then
ln -fhs "${FWK_DOCDIR}/index.html" "${APPDIR}/Python Documentation.html"
- open "${APPDIR}" || true # open the applications folder
+ if [ "${COMMAND_LINE_INSTALL}" != 1 ]; then
+ open "${APPDIR}" || true # open the applications folder
+ fi
fi
# make share/doc link in framework for command line users
<false/>
<key>interpreter_list</key>
<array>
- <string>/usr/local/bin/pythonw</string>
- <string>/usr/bin/pythonw</string>
- <string>/sw/bin/pythonw</string>
+ <string>/usr/local/bin/python3</string>
+ <string>/opt/local/bin/python3</string>
+ <string>/sw/bin/python3</string>
</array>
<key>honourhashbang</key>
<false/>
<false/>
<key>interpreter_list</key>
<array>
- <string>/usr/local/bin/pythonw</string>
- <string>/usr/local/bin/python</string>
- <string>/usr/bin/pythonw</string>
- <string>/usr/bin/python</string>
- <string>/sw/bin/pythonw</string>
- <string>/sw/bin/python</string>
+ <string>/usr/local/bin/python3</string>
+ <string>/opt/local/bin/python3</string>
+ <string>/sw/bin/python3</string>
</array>
<key>honourhashbang</key>
<false/>
<false/>
<key>interpreter_list</key>
<array>
- <string>/usr/local/bin/pythonw</string>
- <string>/usr/local/bin/python</string>
- <string>/usr/bin/pythonw</string>
- <string>/usr/bin/python</string>
- <string>/sw/bin/pythonw</string>
- <string>/sw/bin/python</string>
+ <string>/usr/local/bin/python3</string>
+ <string>/opt/local/bin/python3</string>
+ <string>/sw/bin/python3</string>
</array>
<key>honourhashbang</key>
<false/>
<key>CFBundleExecutable</key>
<string>Python</string>
<key>CFBundleGetInfoString</key>
- <string>%version%, (c) 2001-2016 Python Software Foundation.</string>
+ <string>%version%, (c) 2001-2020 Python Software Foundation.</string>
<key>CFBundleHelpBookFolder</key>
<array>
<string>Documentation</string>
<key>NSAppleScriptEnabled</key>
<true/>
<key>NSHumanReadableCopyright</key>
- <string>(c) 2001-2016 Python Software Foundation.</string>
+ <string>(c) 2001-2020 Python Software Foundation.</string>
<key>NSHighResolutionCapable</key>
<true/>
</dict>
}
}
+ /*
+ * The environment variable is used to pass the value of real_path
+ * to the actual python interpreter, and is read by code in
+ * Python/coreconfig.c.
+ *
+ * This way the real interpreter knows how the user invoked the
+ * interpreter and can behave as if this launcher is the real
+ * interpreter (looking for pyvenv configuration, ...)
+ */
setenv("__PYVENV_LAUNCHER__", real_path, 1);
}
Billy G. Allie
Jamiel Almeida
Kevin Altis
+Samy Lahfa
Skyler Leigh Amador
Joe Amenta
Rose Ames
Jaromir Dolecek
Zsolt Dollenstein
Brendan Donegan
+Peter Donis
Ismail Donmez
Ray Donnelly
Robert Donohue
David Jacobs
Kevin Jacobs
Kjetil Jacobsen
+Shantanu Jain
Bertrand Janin
Geert Jansen
Jack Jansen
Guido Kollerie
Jacek Kołodziej
Jacek Konieczny
+Krzysztof Konopko
Arkady Koplyarov
Peter A. Koren
Марк Коренберг
Bryan Olson
Grant Olson
Koray Oner
+Ethan Onstott
Piet van Oostrum
Tomas Oppelstrup
Jason Orendorff
Python News
+++++++++++
+What's New in Python 3.7.8 final?
+=================================
+
+*Release date: 2020-06-27*
+
+Tests
+-----
+
+- bpo-41009: Fix use of ``support.require_{linux|mac|freebsd}_version()``
+  decorators as class decorators.
+
+macOS
+-----
+
+- bpo-41100: Fix configure error when building on macOS 11. Note that 3.7.8
+ was released shortly after the first developer preview of macOS 11 (Big
+ Sur); there are other known issues with building and running on the
+ developer preview. Big Sur is expected to be fully supported in a future
+ bugfix release of Python 3.8.x and with 3.9.0.
+
+
+What's New in Python 3.7.8 release candidate 1?
+===============================================
+
+*Release date: 2020-06-17*
+
+Security
+--------
+
+- bpo-39073: Disallow CR or LF in email.headerregistry.Address arguments to
+ guard against header injection attacks.
+
+- bpo-38576: Disallow control characters in hostnames in http.client,
+ addressing CVE-2019-18348. Such potentially malicious header injection
+  URLs now cause an InvalidURL to be raised.
+
+- bpo-39503: CVE-2020-8492: The
+ :class:`~urllib.request.AbstractBasicAuthHandler` class of the
+ :mod:`urllib.request` module uses an inefficient regular expression which
+ can be exploited by an attacker to cause a denial of service. Fix the
+ regex to prevent the catastrophic backtracking. Vulnerability reported by
+ Ben Caller and Matt Schwager.
+
+Core and Builtins
+-----------------
+
+- bpo-40663: Correctly generate annotations where parentheses are omitted
+ but required (e.g: ``Type[(str, int, *other))]``.
+
+- bpo-40417: Fix imp module deprecation warning when PyImport_ReloadModule
+ is called. Patch by Robert Rouhani.
+
+- bpo-20526: Fix :c:func:`PyThreadState_Clear()`. ``PyThreadState.frame`` is
+ a borrowed reference, not a strong reference: ``PyThreadState_Clear()``
+ must not call ``Py_CLEAR(tstate->frame)``.
+
+- bpo-38894: Fix a bug that was causing incomplete results when calling
+ ``pathlib.Path.glob`` in the presence of symlinks that point to files
+ where the user does not have read access. Patch by Pablo Galindo and Matt
+ Wozniski.
+
+- bpo-39871: Fix a possible :exc:`SystemError` in
+ ``math.{atan2,copysign,remainder}()`` when the first argument cannot be
+ converted to a :class:`float`. Patch by Zachary Spytz.
+
+- bpo-39520: Fix unparsing of ext slices with no items (``foo[:,]``). Patch
+ by Batuhan Taskaya.
+
+- bpo-24048: Save the live exception during import.c's ``remove_module()``.
+
+- bpo-22490: Don't leak environment variable ``__PYVENV_LAUNCHER__`` into
+ the interpreter session on macOS.
+
+Library
+-------
+
+- bpo-40448: :mod:`ensurepip` now disables the use of `pip` cache when
+ installing the bundled versions of `pip` and `setuptools`. Patch by
+ Krzysztof Konopko.
+
+- bpo-40807: Stop codeop._maybe_compile, used by code.InteractiveInterpreter
+  (and IDLE), from emitting each warning three times.
+
+- bpo-38488: Update ensurepip to install pip 20.1.1 and setuptools 47.1.0.
+
+- bpo-40767: :mod:`webbrowser` now properly finds the default browser in
+ pure Wayland systems by checking the WAYLAND_DISPLAY environment variable.
+ Patch contributed by Jérémy Attali.
+
+- bpo-30008: Fix :mod:`ssl` code to be compatible with OpenSSL 1.1.x builds
+ that use ``no-deprecated`` and ``--api=1.1.0``.
+
+- bpo-25872: :mod:`linecache` could crash with a :exc:`KeyError` when
+ accessed from multiple threads. Fix by Michael Graczyk.
+
+- bpo-40515: The :mod:`ssl` and :mod:`hashlib` modules now actively check
+  that OpenSSL is built with thread support. Python 3.7.0 made thread
+  support mandatory and no longer works safely with no-thread builds.
+
+- bpo-13097: ``ctypes`` now raises an ``ArgumentError`` when a callback is
+ invoked with more than 1024 arguments.
+
+- bpo-40559: Fix possible memory leak in the C implementation of
+ :class:`asyncio.Task`.
+
+- bpo-40457: The ssl module now support OpenSSL builds without TLS 1.0 and
+ 1.1 methods.
+
+- bpo-40459: :func:`platform.win32_ver` now produces correct *ptype* strings
+ instead of empty strings.
+
+- bpo-40138: Fix the Windows implementation of :func:`os.waitpid` for exit
+ code larger than ``INT_MAX >> 8``. The exit status is now interpreted as
+ an unsigned number.
+
+- bpo-39942: Set "__main__" as the default module name when "__name__" is
+ missing in :class:`typing.TypeVar`. Patch by Weipeng Hong.
+
+- bpo-40287: Fixed ``SpooledTemporaryFile.seek()`` to return the position.
+
+- bpo-40196: Fix a bug in the :mod:`symtable` module that was causing it
+  to incorrectly report global variables as local. Patch by Pablo Galindo.
+
+- bpo-40126: Fixed reverting multiple patches in unittest.mock. Patcher's
+  ``__exit__()`` is now never called if its ``__enter__()`` failed.
+  Returning true from ``__exit__()`` now silences the exception.
+
+- bpo-40089: Fix threading._after_fork(): if fork was not called by a thread
+ spawned by threading.Thread, threading._after_fork() now creates a
+ _MainThread instance for _main_thread, instead of a _DummyThread instance.
+
+- bpo-39503: :class:`~urllib.request.AbstractBasicAuthHandler` of
+ :mod:`urllib.request` now parses all WWW-Authenticate HTTP headers and
+ accepts multiple challenges per header: use the realm of the first Basic
+ challenge.
+
+- bpo-40014: Fix ``os.getgrouplist()``: if ``getgrouplist()`` function fails
+ because the group list is too small, retry with a larger group list. On
+ failure, the glibc implementation of ``getgrouplist()`` sets ``ngroups``
+ to the total number of groups. For other implementations, double the group
+ list size.
+
+- bpo-40025: Raise TypeError when _generate_next_value_ is defined after
+ members. Patch by Ethan Onstott.
+
+- bpo-40016: In re docstring, clarify the relationship between inline and
+ argument compile flags.
+
+- bpo-39652: The column name found in ``sqlite3.Cursor.description`` is now
+ truncated on the first '[' only if the PARSE_COLNAMES option is set.
+
+- bpo-38662: The ``ensurepip`` module now invokes ``pip`` via the ``runpy``
+ module. Hence it is no longer tightly coupled with the internal API of the
+ bundled ``pip`` version, allowing easier updates to a newer ``pip``
+ version both internally and for distributors.
+
+- bpo-39916: More reliable use of ``os.scandir()`` in ``Path.glob()``. It no
+ longer emits a ResourceWarning when interrupted.
+
+- bpo-39850: :mod:`multiprocessing` now supports abstract socket addresses
+ (if abstract sockets are supported in the running platform). Patch by
+ Pablo Galindo.
+
+- bpo-39828: Fix :mod:`json.tool` to catch :exc:`BrokenPipeError`. Patch by
+ Dong-hee Na.
+
+- bpo-39040: Fix parsing of invalid mime headers parameters by collapsing
+ whitespace between encoded words in a bare-quote-string.
+
+- bpo-35714: :exc:`struct.error` is now raised if there is a null character
+ in a :mod:`struct` format string.
+
+- bpo-36541: lib2to3 now recognizes named assignment expressions (the walrus
+ operator, ``:=``)
+
+- bpo-29620: :func:`~unittest.TestCase.assertWarns` no longer raises a
+ ``RuntimeException`` when accessing a module's ``__warningregistry__``
+ causes importation of a new module, or when a new module is imported in
+ another thread. Patch by Kernc.
+
+- bpo-34226: Fix `cgi.parse_multipart` without content_length. Patch by
+  Roger Duran.
+
+- bpo-31758: Prevent crashes when using an uninitialized
+ ``_elementtree.XMLParser`` object. Patch by Oren Milman.
+
+Documentation
+-------------
+
+- bpo-40561: Provide docstrings for webbrowser open functions.
+
+- bpo-27635: The pickle documentation incorrectly claimed that ``__new__``
+ isn't called by default when unpickling.
+
+- bpo-39879: Updated :ref:`datamodel` docs to include :func:`dict` insertion
+ order preservation. Patch by Furkan Onder and Samy Lahfa.
+
+- bpo-39677: Changed operand name of **MAKE_FUNCTION** from *argc* to
+ *flags* for module :mod:`dis`
+
+- bpo-39435: Fix an incorrect signature for :func:`pickle.loads` in the docs
+
+- bpo-38387: Document :c:macro:`PyDoc_STRVAR` macro in the C-API reference.
+
+Tests
+-----
+
+- bpo-40964: Disable remote :mod:`imaplib` tests, host cyrus.andrew.cmu.edu
+ is blocking incoming connections.
+
+- bpo-40055: distutils.tests now saves/restores warnings filters to leave
+ them unchanged. Importing tests imports docutils which imports
+ pkg_resources which adds a warnings filter.
+
+- bpo-40436: test_gdb and test.pythoninfo now check gdb command exit code.
+
+- bpo-39932: Fix multiprocessing test_heap(): a new Heap object is now
+ created for each test run.
+
+- bpo-40162: Update Travis CI configuration to OpenSSL 1.1.1f.
+
+- bpo-40146: Update OpenSSL to 1.1.1f in Azure Pipelines.
+
+- bpo-40019: test_gdb now skips tests if it detects that gdb failed to read
+ debug information because the Python binary is optimized.
+
+- bpo-27807: ``test_site.test_startup_imports()`` is now skipped if a path
+ of :data:`sys.path` contains a ``.pth`` file.
+
+- bpo-39793: Use the same domain when testing ``make_msgid``. Patch by
+ Batuhan Taskaya.
+
+- bpo-1812: Fix newline handling in doctest.testfile when loading from a
+ package whose loader has a get_data method. Patch by Peter Donis.
+
+- bpo-37957: test.regrtest now can receive a list of test patterns to ignore
+ (using the -i/--ignore argument) or a file with a list of patterns to
+ ignore (using the --ignore-file argument). Patch by Pablo Galindo.
+
+- bpo-38502: test.regrtest now uses process groups in the multiprocessing
+ mode (-jN command line option) if process groups are available: if
+ :func:`os.setsid` and :func:`os.killpg` functions are available.
+
+- bpo-37421: multiprocessing tests now stop the ForkServer instance if it's
+ running: close the "alive" file descriptor to ask the server to stop and
+ then remove its UNIX address.
+
+- bpo-37421: multiprocessing tests now explicitly call ``_run_finalizers()``
+ to immediately remove temporary directories created by tests.
+
+Build
+-----
+
+- bpo-40653: Move _dirnameW out of HAVE_SYMLINK to fix a potential compiling
+ issue.
+
+- bpo-38360: Support single-argument form of macOS -isysroot flag.
+
+- bpo-40204: Pin Sphinx version to 2.3.1 in ``Doc/Makefile``.
+
+- bpo-40158: Fix CPython MSBuild Properties in NuGet Package
+ (build/native/python.props)
+
+Windows
+-------
+
+- bpo-40164: Updates Windows OpenSSL to 1.1.1g
+
+- bpo-39631: Changes the registered MIME type for ``.py`` files on Windows
+ to ``text/x-python`` instead of ``text/plain``.
+
+- bpo-40650: Include winsock2.h in pytime.c for timeval.
+
+- bpo-39930: Ensures the required :file:`vcruntime140.dll` is included in
+ install packages.
+
+- bpo-39847: Avoid hang when computer is hibernated whilst waiting for a
+ mutex (for lock-related objects from :mod:`threading`) around 49-day
+ uptime.
+
+- bpo-38492: Remove ``pythonw.exe`` dependency on the Microsoft C++ runtime.
+
+macOS
+-----
+
+- bpo-39580: Avoid opening Finder window if running installer from the
+ command line.
+
+- bpo-40400: Update the macOS installer build scripts to build with Python
+ 3.x and to build correctly on newer macOS systems with SIP.
+
+- bpo-40741: Update macOS installer to use SQLite 3.32.2.
+
+- bpo-38329: python.org macOS installers now update the Current version
+ symlink of /Library/Frameworks/Python.framework/Versions for 3.9 installs.
+ Previously, Current was only updated for Python 2.x installs. This should
+ make it easier to embed Python 3 into other macOS applications.
+
+- bpo-40164: Update macOS installer builds to use OpenSSL 1.1.1g.
+
+IDLE
+----
+
+- bpo-39885: Make context menu Cut and Copy work again when right-clicking
+ within a selection.
+
+- bpo-40723: Make test_idle pass when run after import.
+
+- bpo-27115: For 'Go to Line', use a Query box subclass with IDLE standard
+ behavior and improved error checking.
+
+- bpo-39885: Since clicking to get an IDLE context menu moves the cursor,
+ any text selection should be and now is cleared.
+
+- bpo-39852: Edit "Go to line" now clears any selection, preventing
+ accidental deletion. It also updates Ln and Col on the status bar.
+
+- bpo-38439: Add a 256×256 pixel IDLE icon to support more modern
+ environments. Created by Andrew Clover. Delete the unused macOS idle.icns
+ icon file.
+
+- bpo-38689: IDLE will no longer freeze when inspect.signature fails when
+ fetching a calltip.
+
+Tools/Demos
+-----------
+
+- bpo-40479: Update multissltest helper to test with latest OpenSSL 1.0.2,
+ 1.1.0, 1.1.1, and 3.0.0-alpha.
+
+- bpo-40179: Fixed translation of ``#elif`` in Argument Clinic.
+
+- bpo-40163: Fix multissltest tool. OpenSSL has changed download URL for old
+ releases. The multissltest tool now tries to download from current and old
+ download URLs.
+
+- bpo-36184: Port python-gdb.py to FreeBSD. python-gdb.py now checks for
+ "take_gil" function name to check if a frame tries to acquire the GIL,
+ instead of checking for "pthread_cond_timedwait" which is specific to
+ Linux and can be a different condition than the GIL.
+
+- bpo-39889: Fixed ``unparse.py`` for extended slices containing a single
+ element (e.g. ``a[i:j,]``). Remove redundant tuples when index with a
+ tuple (e.g. ``a[i, j]``).
+
+C API
+-----
+
+- bpo-39884: :c:func:`_PyMethodDef_RawFastCallDict` and
+ :c:func:`_PyMethodDef_RawFastCallKeywords` now include the method name in
+ the SystemError "bad call flags" error message to ease debug.
+
+- bpo-38643: :c:func:`PyNumber_ToBase` now raises a :exc:`SystemError`
+ instead of crashing when called with invalid base.
+
+
What's New in Python 3.7.7 final?
=================================
end
# generally useful macro to print a Unicode string
-def pu
+define pu
set $uni = $arg0
set $i = 0
while (*$uni && $i++<100)
coro = task->task_coro;
if (coro == NULL) {
PyErr_SetString(PyExc_RuntimeError, "uninitialized Task object");
+ if (clear_exc) {
+ /* We created 'exc' during this call */
+ Py_DECREF(exc);
+ }
return NULL;
}
#endif
/*
+ * bpo-13097: Max number of arguments _ctypes_callproc will accept.
+ *
+ * This limit is enforced for the `alloca()` call in `_ctypes_callproc`,
+ * to avoid allocating a massive buffer on the stack.
+ */
+#define CTYPES_MAX_ARGCOUNT 1024
+
+/*
* Requirements, must be ensured by the caller:
* - argtuple is tuple of arguments
* - argtypes is either NULL, or a tuple of the same size as argtuple
++argcount;
#endif
+ if (argcount > CTYPES_MAX_ARGCOUNT)
+ {
+ PyErr_Format(PyExc_ArgError, "too many arguments (%zi), maximum is %i",
+ argcount, CTYPES_MAX_ARGCOUNT);
+ return NULL;
+ }
+
args = (struct argument *)alloca(sizeof(struct argument) * argcount);
if (!args) {
PyErr_NoMemory();
const mpd_context_t *ctx, uint32_t *status)
{
_mpd_qdiv(SET_IDEAL_EXP, q, a, b, ctx, status);
-
- if (*status & MPD_Malloc_error) {
- /* Inexact quotients (the usual case) fill the entire context precision,
- * which can lead to malloc() failures for very high precisions. Retry
- * the operation with a lower precision in case the result is exact.
- *
- * We need an upper bound for the number of digits of a_coeff / b_coeff
- * when the result is exact. If a_coeff' * 1 / b_coeff' is in lowest
- * terms, then maxdigits(a_coeff') + maxdigits(1 / b_coeff') is a suitable
- * bound.
- *
- * 1 / b_coeff' is exact iff b_coeff' exclusively has prime factors 2 or 5.
- * The largest amount of digits is generated if b_coeff' is a power of 2 or
- * a power of 5 and is less than or equal to log5(b_coeff') <= log2(b_coeff').
- *
- * We arrive at a total upper bound:
- *
- * maxdigits(a_coeff') + maxdigits(1 / b_coeff') <=
- * a->digits + log2(b_coeff) =
- * a->digits + log10(b_coeff) / log10(2) <=
- * a->digits + b->digits * 4;
- */
- uint32_t workstatus = 0;
- mpd_context_t workctx = *ctx;
- workctx.prec = a->digits + b->digits * 4;
- if (workctx.prec >= ctx->prec) {
- return; /* No point in retrying, keep the original error. */
- }
-
- _mpd_qdiv(SET_IDEAL_EXP, q, a, b, &workctx, &workstatus);
- if (workstatus == 0) { /* The result is exact, unrounded, normal etc. */
- *status = 0;
- return;
- }
-
- mpd_seterror(q, *status, status);
- }
}
/* Internal function. */
/* END LIBMPDEC_ONLY */
/* Algorithm from decimal.py */
-static void
-_mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx,
- uint32_t *status)
+void
+mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx,
+ uint32_t *status)
{
mpd_context_t maxcontext;
MPD_NEW_STATIC(c,0,0,0,0);
goto out;
}
-void
-mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx,
- uint32_t *status)
-{
- _mpd_qsqrt(result, a, ctx, status);
-
- if (*status & (MPD_Malloc_error|MPD_Division_impossible)) {
- /* The above conditions can occur at very high context precisions
- * if intermediate values get too large. Retry the operation with
- * a lower context precision in case the result is exact.
- *
- * If the result is exact, an upper bound for the number of digits
- * is the number of digits in the input.
- *
- * NOTE: sqrt(40e9) = 2.0e+5 /\ digits(40e9) = digits(2.0e+5) = 2
- */
- uint32_t workstatus = 0;
- mpd_context_t workctx = *ctx;
- workctx.prec = a->digits;
-
- if (workctx.prec >= ctx->prec) {
- return; /* No point in repeating this, keep the original error. */
- }
-
- _mpd_qsqrt(result, a, &workctx, &workstatus);
- if (workstatus == 0) {
- *status = 0;
- return;
- }
-
- mpd_seterror(result, *status, status);
- }
-}
-
/******************************************************************************/
/* Base conversions */
'special': ('context.__reduce_ex__', 'context.create_decimal_from_float')
}
-# Functions that set no context flags but whose result can differ depending
-# on prec, Emin and Emax.
-MaxContextSkip = ['is_normal', 'is_subnormal', 'logical_invert', 'next_minus',
- 'next_plus', 'number_class', 'logical_and', 'logical_or',
- 'logical_xor', 'next_toward', 'rotate', 'shift']
-
# Functions that require a restricted exponent range for reasonable runtimes.
UnaryRestricted = [
'__ceil__', '__floor__', '__int__', '__trunc__',
self.pex = RestrictedList() # Python exceptions for P.Decimal
self.presults = RestrictedList() # P.Decimal results
- # If the above results are exact, unrounded and not clamped, repeat
- # the operation with a maxcontext to ensure that huge intermediate
- # values do not cause a MemoryError.
- self.with_maxcontext = False
- self.maxcontext = context.c.copy()
- self.maxcontext.prec = C.MAX_PREC
- self.maxcontext.Emax = C.MAX_EMAX
- self.maxcontext.Emin = C.MIN_EMIN
- self.maxcontext.clear_flags()
-
- self.maxop = RestrictedList() # converted C.Decimal operands
- self.maxex = RestrictedList() # Python exceptions for C.Decimal
- self.maxresults = RestrictedList() # C.Decimal results
-
# ======================================================================
# SkipHandler: skip known discrepancies
if t.contextfunc:
cargs = t.cop
pargs = t.pop
- maxargs = t.maxop
cfunc = "c_func: %s(" % t.funcname
pfunc = "p_func: %s(" % t.funcname
- maxfunc = "max_func: %s(" % t.funcname
else:
cself, cargs = t.cop[0], t.cop[1:]
pself, pargs = t.pop[0], t.pop[1:]
- maxself, maxargs = t.maxop[0], t.maxop[1:]
cfunc = "c_func: %s.%s(" % (repr(cself), t.funcname)
pfunc = "p_func: %s.%s(" % (repr(pself), t.funcname)
- maxfunc = "max_func: %s.%s(" % (repr(maxself), t.funcname)
err = cfunc
for arg in cargs:
err = err.rstrip(", ")
err += ")"
- if t.with_maxcontext:
- err += "\n"
- err += maxfunc
- for arg in maxargs:
- err += "%s, " % repr(arg)
- err = err.rstrip(", ")
- err += ")"
-
return err
def raise_error(t):
err = "Error in %s:\n\n" % t.funcname
err += "input operands: %s\n\n" % (t.op,)
err += function_as_string(t)
-
- err += "\n\nc_result: %s\np_result: %s\n" % (t.cresults, t.presults)
- if t.with_maxcontext:
- err += "max_result: %s\n\n" % (t.maxresults)
- else:
- err += "\n"
-
- err += "c_exceptions: %s\np_exceptions: %s\n" % (t.cex, t.pex)
- if t.with_maxcontext:
- err += "max_exceptions: %s\n\n" % t.maxex
- else:
- err += "\n"
-
- err += "%s\n" % str(t.context)
- if t.with_maxcontext:
- err += "%s\n" % str(t.maxcontext)
- else:
- err += "\n"
+ err += "\n\nc_result: %s\np_result: %s\n\n" % (t.cresults, t.presults)
+ err += "c_exceptions: %s\np_exceptions: %s\n\n" % (t.cex, t.pex)
+ err += "%s\n\n" % str(t.context)
raise VerifyError(err)
# are printed to stdout.
# ======================================================================
-def all_nan(a):
- if isinstance(a, C.Decimal):
- return a.is_nan()
- elif isinstance(a, tuple):
- return all(all_nan(v) for v in a)
- return False
-
def convert(t, convstr=True):
""" t is the testset. At this stage the testset contains a tuple of
operands t.op of various types. For decimal methods the first
for i, op in enumerate(t.op):
context.clear_status()
- t.maxcontext.clear_flags()
if op in RoundModes:
t.cop.append(op)
t.pop.append(op)
- t.maxop.append(op)
elif not t.contextfunc and i == 0 or \
convstr and isinstance(op, str):
p = None
pex = e.__class__
- try:
- C.setcontext(t.maxcontext)
- maxop = C.Decimal(op)
- maxex = None
- except (TypeError, ValueError, OverflowError) as e:
- maxop = None
- maxex = e.__class__
- finally:
- C.setcontext(context.c)
-
t.cop.append(c)
t.cex.append(cex)
-
t.pop.append(p)
t.pex.append(pex)
- t.maxop.append(maxop)
- t.maxex.append(maxex)
-
if cex is pex:
if str(c) != str(p) or not context.assert_eq_status():
raise_error(t)
else:
raise_error(t)
- # The exceptions in the maxcontext operation can legitimately
- # differ, only test that maxex implies cex:
- if maxex is not None and cex is not maxex:
- raise_error(t)
-
elif isinstance(op, Context):
t.context = op
t.cop.append(op.c)
t.pop.append(op.p)
- t.maxop.append(t.maxcontext)
else:
t.cop.append(op)
t.pop.append(op)
- t.maxop.append(op)
return 1
t.rc and t.rp are the results of the operation.
"""
context.clear_status()
- t.maxcontext.clear_flags()
try:
if t.contextfunc:
t.rp = None
t.pex.append(e.__class__)
- # If the above results are exact, unrounded, normal etc., repeat the
- # operation with a maxcontext to ensure that huge intermediate values
- # do not cause a MemoryError.
- if (t.funcname not in MaxContextSkip and
- not context.c.flags[C.InvalidOperation] and
- not context.c.flags[C.Inexact] and
- not context.c.flags[C.Rounded] and
- not context.c.flags[C.Subnormal] and
- not context.c.flags[C.Clamped] and
- not context.clamp and # results are padded to context.prec if context.clamp==1.
- not any(isinstance(v, C.Context) for v in t.cop)): # another context is used.
- t.with_maxcontext = True
- try:
- if t.contextfunc:
- maxargs = t.maxop
- t.rmax = getattr(t.maxcontext, t.funcname)(*maxargs)
- else:
- maxself = t.maxop[0]
- maxargs = t.maxop[1:]
- try:
- C.setcontext(t.maxcontext)
- t.rmax = getattr(maxself, t.funcname)(*maxargs)
- finally:
- C.setcontext(context.c)
- t.maxex.append(None)
- except (TypeError, ValueError, OverflowError, MemoryError) as e:
- t.rmax = None
- t.maxex.append(e.__class__)
-
def verify(t, stat):
""" t is the testset. At this stage the testset contains the following
tuples:
"""
t.cresults.append(str(t.rc))
t.presults.append(str(t.rp))
- if t.with_maxcontext:
- t.maxresults.append(str(t.rmax))
-
if isinstance(t.rc, C.Decimal) and isinstance(t.rp, P.Decimal):
# General case: both results are Decimals.
t.cresults.append(t.rc.to_eng_string())
t.presults.append(str(t.rp.imag))
t.presults.append(str(t.rp.real))
- if t.with_maxcontext and isinstance(t.rmax, C.Decimal):
- t.maxresults.append(t.rmax.to_eng_string())
- t.maxresults.append(t.rmax.as_tuple())
- t.maxresults.append(str(t.rmax.imag))
- t.maxresults.append(str(t.rmax.real))
-
nc = t.rc.number_class().lstrip('+-s')
stat[nc] += 1
else:
if not isinstance(t.rc, tuple) and not isinstance(t.rp, tuple):
if t.rc != t.rp:
raise_error(t)
- if t.with_maxcontext and not isinstance(t.rmax, tuple):
- if t.rmax != t.rc:
- raise_error(t)
stat[type(t.rc).__name__] += 1
# The return value lists must be equal.
if not t.context.assert_eq_status():
raise_error(t)
- if t.with_maxcontext:
- # NaN payloads etc. depend on precision and clamp.
- if all_nan(t.rc) and all_nan(t.rmax):
- return
- # The return value lists must be equal.
- if t.maxresults != t.cresults:
- raise_error(t)
- # The Python exception lists (TypeError, etc.) must be equal.
- if t.maxex != t.cex:
- raise_error(t)
- # The context flags must be equal.
- if t.maxcontext.flags != t.context.c.flags:
- raise_error(t)
-
# ======================================================================
# Main test loops
_elementtree_Element___getstate___impl(ElementObject *self)
/*[clinic end generated code: output=37279aeeb6bb5b04 input=f0d16d7ec2f7adc1]*/
{
- Py_ssize_t i, noattrib;
- PyObject *instancedict = NULL, *children;
+ Py_ssize_t i;
+ PyObject *children, *attrib;
/* Build a list of children. */
children = PyList_New(self->extra ? self->extra->length : 0);
PyList_SET_ITEM(children, i, child);
}
- /* Construct the state object. */
- noattrib = (self->extra == NULL || self->extra->attrib == Py_None);
- if (noattrib)
- instancedict = Py_BuildValue("{sOsOs{}sOsO}",
- PICKLED_TAG, self->tag,
- PICKLED_CHILDREN, children,
- PICKLED_ATTRIB,
- PICKLED_TEXT, JOIN_OBJ(self->text),
- PICKLED_TAIL, JOIN_OBJ(self->tail));
- else
- instancedict = Py_BuildValue("{sOsOsOsOsO}",
- PICKLED_TAG, self->tag,
- PICKLED_CHILDREN, children,
- PICKLED_ATTRIB, self->extra->attrib,
- PICKLED_TEXT, JOIN_OBJ(self->text),
- PICKLED_TAIL, JOIN_OBJ(self->tail));
- if (instancedict) {
- Py_DECREF(children);
- return instancedict;
+ if (self->extra && self->extra->attrib != Py_None) {
+ attrib = self->extra->attrib;
+ Py_INCREF(attrib);
}
else {
- for (i = 0; i < PyList_GET_SIZE(children); i++)
- Py_DECREF(PyList_GET_ITEM(children, i));
- Py_DECREF(children);
-
- return NULL;
+ attrib = PyDict_New();
+ if (!attrib) {
+ Py_DECREF(children);
+ return NULL;
+ }
}
+
+ return Py_BuildValue("{sOsNsNsOsO}",
+ PICKLED_TAG, self->tag,
+ PICKLED_CHILDREN, children,
+ PICKLED_ATTRIB, attrib,
+ PICKLED_TEXT, JOIN_OBJ(self->text),
+ PICKLED_TAIL, JOIN_OBJ(self->tail));
}
static PyObject *
Py_TYPE(self)->tp_free((PyObject *)self);
}
+/* Guard helper: verify that XMLParser.__init__() has run, i.e. that
+   self->target was set.  Returns 1 when the parser is initialized;
+   otherwise sets ValueError and returns 0, so call sites can simply
+   `return NULL` on failure. */
+Py_LOCAL_INLINE(int)
+_check_xmlparser(XMLParserObject* self)
+{
+    if (self->target == NULL) {
+        PyErr_SetString(PyExc_ValueError,
+                "XMLParser.__init__() wasn't called");
+        return 0;
+    }
+    return 1;
+}
+
LOCAL(PyObject*)
expat_parse(XMLParserObject* self, const char* data, int data_len, int final)
{
/* end feeding data to parser */
PyObject* res;
+
+ if (!_check_xmlparser(self)) {
+ return NULL;
+ }
res = expat_parse(self, "", 0, 1);
if (!res)
return NULL;
{
/* feed data to parser */
+ if (!_check_xmlparser(self)) {
+ return NULL;
+ }
if (PyUnicode_Check(data)) {
Py_ssize_t data_len;
const char *data_ptr = PyUnicode_AsUTF8AndSize(data, &data_len);
PyObject* temp;
PyObject* res;
+ if (!_check_xmlparser(self)) {
+ return NULL;
+ }
reader = PyObject_GetAttrString(file, "read");
if (!reader)
return NULL;
TreeBuilderObject *target;
PyObject *events_append, *events_seq;
+ if (!_check_xmlparser(self)) {
+ return NULL;
+ }
if (!TreeBuilder_CheckExact(self->target)) {
PyErr_SetString(
PyExc_TypeError,
else
goto generic;
+ if (!res && !_check_xmlparser(self)) {
+ return NULL;
+ }
Py_INCREF(res);
return res;
}
[clinic start generated code]*/
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=c2b4ff081bac4be1]*/
+#ifndef OPENSSL_THREADS
+# error "OPENSSL_THREADS is not defined, Python requires thread-safe OpenSSL"
+#endif
+
#define MUNCH_SIZE INT_MAX
#ifndef HASH_OBJ_CONSTRUCTOR
return 0;
}
-PyObject* _pysqlite_build_column_name(const char* colname)
+static PyObject *
+_pysqlite_build_column_name(pysqlite_Cursor *self, const char *colname)
{
const char* pos;
+ Py_ssize_t len;
if (!colname) {
Py_RETURN_NONE;
}
- for (pos = colname;; pos++) {
- if (*pos == 0 || *pos == '[') {
- if ((*pos == '[') && (pos > colname) && (*(pos-1) == ' ')) {
- pos--;
+ if (self->connection->detect_types & PARSE_COLNAMES) {
+ for (pos = colname; *pos; pos++) {
+ if (*pos == '[') {
+ if ((pos != colname) && (*(pos-1) == ' ')) {
+ pos--;
+ }
+ break;
}
- return PyUnicode_FromStringAndSize(colname, pos - colname);
}
+ len = pos - colname;
+ }
+ else {
+ len = strlen(colname);
}
+ return PyUnicode_FromStringAndSize(colname, len);
}
/*
PyObject* result;
int numcols;
PyObject* descriptor;
+ PyObject* column_name;
PyObject* second_argument = NULL;
sqlite_int64 lastrowid;
if (!descriptor) {
goto error;
}
- PyTuple_SetItem(descriptor, 0, _pysqlite_build_column_name(sqlite3_column_name(self->statement->st, i)));
+ column_name = _pysqlite_build_column_name(self,
+ sqlite3_column_name(self->statement->st, i));
+ if (!column_name) {
+ Py_DECREF(descriptor);
+ goto error;
+ }
+ PyTuple_SetItem(descriptor, 0, column_name);
Py_INCREF(Py_None); PyTuple_SetItem(descriptor, 1, Py_None);
Py_INCREF(Py_None); PyTuple_SetItem(descriptor, 2, Py_None);
Py_INCREF(Py_None); PyTuple_SetItem(descriptor, 3, Py_None);
# endif
#endif
+#ifndef OPENSSL_THREADS
+# error "OPENSSL_THREADS is not defined, Python requires thread-safe OpenSSL"
+#endif
+
/* SSL error object */
static PyObject *PySSLErrorObject;
static PyObject *PySSLCertVerificationErrorObject;
# define PY_OPENSSL_1_1_API 1
#endif
+/* OpenSSL API compat */
+#ifdef OPENSSL_API_COMPAT
+#if OPENSSL_API_COMPAT >= 0x10100000L
+
+/* OpenSSL API 1.1.0+ does not include version methods */
+#ifndef OPENSSL_NO_TLS1_METHOD
+#define OPENSSL_NO_TLS1_METHOD 1
+#endif
+#ifndef OPENSSL_NO_TLS1_1_METHOD
+#define OPENSSL_NO_TLS1_1_METHOD 1
+#endif
+#ifndef OPENSSL_NO_TLS1_2_METHOD
+#define OPENSSL_NO_TLS1_2_METHOD 1
+#endif
+
+#endif /* >= 1.1.0 compat */
+#endif /* OPENSSL_API_COMPAT */
+
/* LibreSSL 2.7.0 provides necessary OpenSSL 1.1.0 APIs */
#if defined(LIBRESSL_VERSION_NUMBER) && LIBRESSL_VERSION_NUMBER >= 0x2070000fL
# define PY_OPENSSL_1_1_API 1
#endif
-/* Openssl comes with TLSv1.1 and TLSv1.2 between 1.0.0h and 1.0.1
- http://www.openssl.org/news/changelog.html
- */
-#if OPENSSL_VERSION_NUMBER >= 0x10001000L
-# define HAVE_TLSv1_2 1
-#else
-# define HAVE_TLSv1_2 0
-#endif
-
/* SNI support (client- and server-side) appeared in OpenSSL 1.0.0 and 0.9.8f
* This includes the SSL_set_SSL_CTX() function.
*/
#define TLS_method SSLv23_method
#define TLS_client_method SSLv23_client_method
#define TLS_server_method SSLv23_server_method
+#define ASN1_STRING_get0_data ASN1_STRING_data
+#define X509_get0_notBefore X509_get_notBefore
+#define X509_get0_notAfter X509_get_notAfter
+#define OpenSSL_version_num SSLeay
+#define OpenSSL_version SSLeay_version
+#define OPENSSL_VERSION SSLEAY_VERSION
static int X509_NAME_ENTRY_set(const X509_NAME_ENTRY *ne)
{
PY_SSL_VERSION_SSL2,
PY_SSL_VERSION_SSL3=1,
PY_SSL_VERSION_TLS, /* SSLv23 */
-#if HAVE_TLSv1_2
PY_SSL_VERSION_TLS1,
PY_SSL_VERSION_TLS1_1,
PY_SSL_VERSION_TLS1_2,
-#else
- PY_SSL_VERSION_TLS1,
-#endif
PY_SSL_VERSION_TLS_CLIENT=0x10,
PY_SSL_VERSION_TLS_SERVER,
};
goto error;
}
} else {
- if (!X509_VERIFY_PARAM_set1_ip(param, ASN1_STRING_data(ip),
+ if (!X509_VERIFY_PARAM_set1_ip(param, ASN1_STRING_get0_data(ip),
ASN1_STRING_length(ip))) {
_setSSLError(NULL, 0, __FILE__, __LINE__);
goto error;
goto fail;
}
PyTuple_SET_ITEM(t, 0, v);
- v = PyUnicode_FromStringAndSize((char *)ASN1_STRING_data(as),
+ v = PyUnicode_FromStringAndSize((char *)ASN1_STRING_get0_data(as),
ASN1_STRING_length(as));
if (v == NULL) {
Py_DECREF(t);
ASN1_INTEGER *serialNumber;
char buf[2048];
int len, result;
- ASN1_TIME *notBefore, *notAfter;
+ const ASN1_TIME *notBefore, *notAfter;
PyObject *pnotBefore, *pnotAfter;
retval = PyDict_New();
Py_DECREF(sn_obj);
(void) BIO_reset(biobuf);
- notBefore = X509_get_notBefore(certificate);
+ notBefore = X509_get0_notBefore(certificate);
ASN1_TIME_print(biobuf, notBefore);
len = BIO_gets(biobuf, buf, sizeof(buf)-1);
if (len < 0) {
Py_DECREF(pnotBefore);
(void) BIO_reset(biobuf);
- notAfter = X509_get_notAfter(certificate);
+ notAfter = X509_get0_notAfter(certificate);
ASN1_TIME_print(biobuf, notAfter);
len = BIO_gets(biobuf, buf, sizeof(buf)-1);
if (len < 0) {
#endif
PySSL_BEGIN_ALLOW_THREADS
- if (proto_version == PY_SSL_VERSION_TLS1)
+ switch(proto_version) {
+#if defined(SSL3_VERSION) && !defined(OPENSSL_NO_SSL3)
+ case PY_SSL_VERSION_SSL3:
+ ctx = SSL_CTX_new(SSLv3_method());
+ break;
+#endif
+#if (defined(TLS1_VERSION) && \
+ !defined(OPENSSL_NO_TLS1) && \
+ !defined(OPENSSL_NO_TLS1_METHOD))
+ case PY_SSL_VERSION_TLS1:
ctx = SSL_CTX_new(TLSv1_method());
-#if HAVE_TLSv1_2
- else if (proto_version == PY_SSL_VERSION_TLS1_1)
- ctx = SSL_CTX_new(TLSv1_1_method());
- else if (proto_version == PY_SSL_VERSION_TLS1_2)
- ctx = SSL_CTX_new(TLSv1_2_method());
+ break;
#endif
-#ifndef OPENSSL_NO_SSL3
- else if (proto_version == PY_SSL_VERSION_SSL3)
- ctx = SSL_CTX_new(SSLv3_method());
+#if (defined(TLS1_1_VERSION) && \
+ !defined(OPENSSL_NO_TLS1_1) && \
+ !defined(OPENSSL_NO_TLS1_1_METHOD))
+ case PY_SSL_VERSION_TLS1_1:
+ ctx = SSL_CTX_new(TLSv1_1_method());
+ break;
#endif
-#ifndef OPENSSL_NO_SSL2
- else if (proto_version == PY_SSL_VERSION_SSL2)
- ctx = SSL_CTX_new(SSLv2_method());
+#if (defined(TLS1_2_VERSION) && \
+ !defined(OPENSSL_NO_TLS1_2) && \
+ !defined(OPENSSL_NO_TLS1_2_METHOD))
+ case PY_SSL_VERSION_TLS1_2:
+ ctx = SSL_CTX_new(TLSv1_2_method());
+ break;
#endif
- else if (proto_version == PY_SSL_VERSION_TLS) /* SSLv23 */
+ case PY_SSL_VERSION_TLS:
+ /* SSLv23 */
ctx = SSL_CTX_new(TLS_method());
- else if (proto_version == PY_SSL_VERSION_TLS_CLIENT)
+ break;
+ case PY_SSL_VERSION_TLS_CLIENT:
ctx = SSL_CTX_new(TLS_client_method());
- else if (proto_version == PY_SSL_VERSION_TLS_SERVER)
+ break;
+ case PY_SSL_VERSION_TLS_SERVER:
ctx = SSL_CTX_new(TLS_server_method());
- else
+ break;
+ default:
proto_version = -1;
+ }
PySSL_END_ALLOW_THREADS
if (proto_version == -1) {
PyErr_SetString(PyExc_ValueError,
- "invalid protocol version");
+ "invalid or unsupported protocol version");
return NULL;
}
if (ctx == NULL) {
conservative and assume it wasn't fixed until release. We do this check
at runtime to avoid problems from the dynamic linker.
See #25672 for more on this. */
- libver = SSLeay();
+ libver = OpenSSL_version_num();
if (!(libver >= 0x10001000UL && libver < 0x1000108fUL) &&
!(libver >= 0x10000000UL && libver < 0x100000dfUL)) {
SSL_CTX_set_mode(self->ctx, SSL_MODE_RELEASE_BUFFERS);
if (bytes == NULL)
return NULL;
if (pseudo) {
+#ifdef PY_OPENSSL_1_1_API
+ ok = RAND_bytes((unsigned char*)PyBytes_AS_STRING(bytes), len);
+#else
ok = RAND_pseudo_bytes((unsigned char*)PyBytes_AS_STRING(bytes), len);
+#endif
if (ok == 0 || ok == 1)
return Py_BuildValue("NO", bytes, ok == 1 ? Py_True : Py_False);
}
if (!_setup_ssl_threads()) {
return NULL;
}
-#elif OPENSSL_VERSION_1_1 && defined(OPENSSL_THREADS)
+#elif OPENSSL_VERSION_1_1
/* OpenSSL 1.1.0 builtin thread support is enabled */
_ssl_locks_count++;
#endif
PY_SSL_VERSION_TLS_SERVER);
PyModule_AddIntConstant(m, "PROTOCOL_TLSv1",
PY_SSL_VERSION_TLS1);
-#if HAVE_TLSv1_2
PyModule_AddIntConstant(m, "PROTOCOL_TLSv1_1",
PY_SSL_VERSION_TLS1_1);
PyModule_AddIntConstant(m, "PROTOCOL_TLSv1_2",
PY_SSL_VERSION_TLS1_2);
-#endif
/* protocol options */
PyModule_AddIntConstant(m, "OP_ALL",
PyModule_AddIntConstant(m, "OP_NO_SSLv2", SSL_OP_NO_SSLv2);
PyModule_AddIntConstant(m, "OP_NO_SSLv3", SSL_OP_NO_SSLv3);
PyModule_AddIntConstant(m, "OP_NO_TLSv1", SSL_OP_NO_TLSv1);
-#if HAVE_TLSv1_2
PyModule_AddIntConstant(m, "OP_NO_TLSv1_1", SSL_OP_NO_TLSv1_1);
PyModule_AddIntConstant(m, "OP_NO_TLSv1_2", SSL_OP_NO_TLSv1_2);
-#endif
#ifdef SSL_OP_NO_TLSv1_3
PyModule_AddIntConstant(m, "OP_NO_TLSv1_3", SSL_OP_NO_TLSv1_3);
#else
/* SSLeay() gives us the version of the library linked against,
which could be different from the headers version.
*/
- libver = SSLeay();
+ libver = OpenSSL_version_num();
r = PyLong_FromUnsignedLong(libver);
if (r == NULL)
return NULL;
r = Py_BuildValue("IIIII", major, minor, fix, patch, status);
if (r == NULL || PyModule_AddObject(m, "OPENSSL_VERSION_INFO", r))
return NULL;
- r = PyUnicode_FromString(SSLeay_version(SSLEAY_VERSION));
+ r = PyUnicode_FromString(OpenSSL_version(OPENSSL_VERSION));
if (r == NULL || PyModule_AddObject(m, "OPENSSL_VERSION", r))
return NULL;
size_t ncodes;
fmt = PyBytes_AS_STRING(self->s_format);
+ if (strlen(fmt) != (size_t)PyBytes_GET_SIZE(self->s_format)) {
+ PyErr_SetString(StructError, "embedded null character");
+ return -1;
+ }
f = whichtable(&fmt);
"unknown" float format */
if (ptr->format == 'd' || ptr->format == 'f')
break;
+ /* Skip _Bool, semantics are different for standard size */
+ if (ptr->format == '?')
+ break;
ptr->pack = native->pack;
ptr->unpack = native->unpack;
break;
}
+/* Test-module helper: expose the C API function PyNumber_ToBase() to
+   Python as pynumber_tobase(obj, base) so its argument handling can be
+   exercised from test code (registered in TestMethods below). */
+static PyObject*
+pynumber_tobase(PyObject *module, PyObject *args)
+{
+    PyObject *obj;
+    int base;
+    /* "Oi": an arbitrary object plus a C int base; on parse failure the
+       exception set by PyArg_ParseTuple propagates via NULL. */
+    if (!PyArg_ParseTuple(args, "Oi:pynumber_tobase",
+                          &obj, &base)) {
+        return NULL;
+    }
+    return PyNumber_ToBase(obj, base);
+}
+
+
static PyMethodDef TestMethods[] = {
{"raise_exception", raise_exception, METH_VARARGS},
{"raise_memoryerror", (PyCFunction)raise_memoryerror, METH_NOARGS},
{"get_global_config", get_global_config, METH_NOARGS},
{"get_core_config", get_core_config, METH_NOARGS},
{"get_main_config", get_main_config, METH_NOARGS},
+ {"pynumber_tobase", pynumber_tobase, METH_VARARGS},
{NULL, NULL} /* sentinel */
};
#endif /* defined(HAVE_WAITPID) */
-#if defined(HAVE_CWAIT)
+#if !defined(HAVE_WAITPID) && defined(HAVE_CWAIT)
PyDoc_STRVAR(os_waitpid__doc__,
"waitpid($module, pid, options, /)\n"
return return_value;
}
-#endif /* defined(HAVE_CWAIT) */
+#endif /* !defined(HAVE_WAITPID) && defined(HAVE_CWAIT) */
#if defined(HAVE_WAIT)
#ifndef OS_GETRANDOM_METHODDEF
#define OS_GETRANDOM_METHODDEF
#endif /* !defined(OS_GETRANDOM_METHODDEF) */
-/*[clinic end generated code: output=32c935671ee020d5 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=f6eff86ac86bfce4 input=a9049054013a1b77]*/
"variable", (Py_ssize_t)len);
}
config->program_name = program_name;
+
+ /*
+ * This environment variable is used to communicate between
+ * the stub launcher and the real interpreter and isn't needed
+ * beyond this point.
+ *
+ * Clean up to avoid problems when launching other programs
+ * later on.
+ */
+ (void)unsetenv("__PYVENV_LAUNCHER__");
+
return _Py_INIT_OK();
}
}
if (! PyArg_UnpackTuple(args, funcname, 2, 2, &ox, &oy))
return NULL;
x = PyFloat_AsDouble(ox);
+ if (x == -1.0 && PyErr_Occurred()) {
+ return NULL;
+ }
y = PyFloat_AsDouble(oy);
- if ((x == -1.0 || y == -1.0) && PyErr_Occurred())
+ if (y == -1.0 && PyErr_Occurred()) {
return NULL;
+ }
errno = 0;
PyFPE_START_PROTECT("in math_2", return 0);
r = (*func)(x, y);
arg = dsp.xxx(arg)
*/
static PyObject *
-_do_ioctl_1(int fd, PyObject *args, char *fname, int cmd)
+_do_ioctl_1(int fd, PyObject *args, char *fname, unsigned long cmd)
{
char argfmt[33] = "i:";
int arg;
way.
*/
static PyObject *
-_do_ioctl_1_internal(int fd, PyObject *args, char *fname, int cmd)
+_do_ioctl_1_internal(int fd, PyObject *args, char *fname, unsigned long cmd)
{
char argfmt[32] = ":";
int arg = 0;
/* _do_ioctl_0() is a private helper for the no-argument ioctls:
SNDCTL_DSP_{SYNC,RESET,POST}. */
static PyObject *
-_do_ioctl_0(int fd, PyObject *args, char *fname, int cmd)
+_do_ioctl_0(int fd, PyObject *args, char *fname, unsigned long cmd)
{
char argfmt[32] = ":";
int rv;
return NULL;
#endif
+ while (1) {
#ifdef __APPLE__
- groups = PyMem_New(int, ngroups);
+ groups = PyMem_New(int, ngroups);
#else
- groups = PyMem_New(gid_t, ngroups);
+ groups = PyMem_New(gid_t, ngroups);
#endif
- if (groups == NULL)
- return PyErr_NoMemory();
+ if (groups == NULL) {
+ return PyErr_NoMemory();
+ }
- if (getgrouplist(user, basegid, groups, &ngroups) == -1) {
- PyMem_Del(groups);
- return posix_error();
+ int old_ngroups = ngroups;
+ if (getgrouplist(user, basegid, groups, &ngroups) != -1) {
+ /* Success */
+ break;
+ }
+
+ /* getgrouplist() fails if the group list is too small */
+ PyMem_Free(groups);
+
+ if (ngroups > old_ngroups) {
+ /* If the group list is too small, the glibc implementation of
+ getgrouplist() sets ngroups to the total number of groups and
+ returns -1. */
+ }
+ else {
+ /* Double the group list size */
+ if (ngroups > INT_MAX / 2) {
+ return PyErr_NoMemory();
+ }
+ ngroups *= 2;
+ }
+
+ /* Retry getgrouplist() with a larger group list */
}
#ifdef _Py_MEMORY_SANITIZER
if (res < 0)
return (!async_err) ? posix_error() : NULL;
+ unsigned long long ustatus = (unsigned int)status;
+
/* shift the status left a byte so this is more like the POSIX waitpid */
- return Py_BuildValue(_Py_PARSE_INTPTR "i", res, status << 8);
+ return Py_BuildValue(_Py_PARSE_INTPTR "K", res, ustatus << 8);
}
#endif
-#ifdef HAVE_SYMLINK
-
#if defined(MS_WINDOWS)
/* Grab CreateSymbolicLinkW dynamically from kernel32 */
return 0;
}
+#endif
+
+#ifdef HAVE_SYMLINK
+
+#if defined(MS_WINDOWS)
+
/* Is this path absolute? */
static int
_is_absW(const WCHAR *path)
}
#endif /* SELECT_USES_HEAP */
- /* Convert sequences to fd_sets, and get maximum fd number
+ /* Convert iterables to fd_sets, and get maximum fd number
* propagates the Python exception set in seq2set()
*/
rfd2obj[0].sentinel = -1;
"select(rlist, wlist, xlist[, timeout]) -> (rlist, wlist, xlist)\n\
\n\
Wait until one or more file descriptors are ready for some kind of I/O.\n\
-The first three arguments are sequences of file descriptors to be waited for:\n\
+The first three arguments are iterables of file descriptors to be waited for:\n\
rlist -- wait until ready for reading\n\
wlist -- wait until ready for writing\n\
xlist -- wait for an ``exceptional condition''\n\
PyObject *
PyNumber_ToBase(PyObject *n, int base)
{
- PyObject *res = NULL;
+ if (!(base == 2 || base == 8 || base == 10 || base == 16)) {
+ PyErr_SetString(PyExc_SystemError,
+ "PyNumber_ToBase: base must be 2, 8, 10 or 16");
+ return NULL;
+ }
PyObject *index = PyNumber_Index(n);
-
if (!index)
return NULL;
- if (PyLong_Check(index))
- res = _PyLong_Format(index, base);
- else
- /* It should not be possible to get here, as
- PyNumber_Index already has a check for the same
- condition */
- PyErr_SetString(PyExc_ValueError, "PyNumber_ToBase: index not int");
+ PyObject *res = _PyLong_Format(index, base);
Py_DECREF(index);
return res;
}
}
default:
- PyErr_SetString(PyExc_SystemError,
- "Bad call flags in _PyMethodDef_RawFastCallDict. "
- "METH_OLDARGS is no longer supported!");
+ PyErr_Format(PyExc_SystemError,
+ "%s() method: bad call flags", method->ml_name);
goto exit;
}
}
default:
- PyErr_SetString(PyExc_SystemError,
- "Bad call flags in _PyMethodDef_RawFastCallKeywords. "
- "METH_OLDARGS is no longer supported!");
+ PyErr_Format(PyExc_SystemError,
+ "%s() method: bad call flags", method->ml_name);
goto exit;
}
return NULL;
assert(PyTuple_Check(tmp));
newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
- if (newobj == NULL)
+ if (newobj == NULL) {
+ Py_DECREF(tmp);
return NULL;
+ }
for (i = 0; i < n; i++) {
item = PyTuple_GET_ITEM(tmp, i);
Py_INCREF(item);
goto done;
}
/* Find out how big our core buffer is, and how many subkeys we have */
- rc = RegQueryInfoKey(newKey, NULL, NULL, NULL, &numKeys, NULL, NULL,
+ rc = RegQueryInfoKeyW(newKey, NULL, NULL, NULL, &numKeys, NULL, NULL,
NULL, NULL, &dataSize, NULL, NULL);
if (rc!=ERROR_SUCCESS) {
goto done;
if ns.include_stable:
yield from in_build(PYTHON_STABLE_DLL_NAME)
+ found_any = False
for dest, src in rglob(ns.build, "vcruntime*.dll"):
+ found_any = True
yield dest, src
+ if not found_any:
+ log_error("Failed to locate vcruntime DLL in the build.")
yield "LICENSE.txt", ns.source / "LICENSE"
PROPS_TEMPLATE = r"""<?xml version="1.0" encoding="utf-8"?>
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup Condition="$(Platform) == '{PYTHON_PLATFORM}'">
- <PythonHome Condition="$(Configuration) == 'Debug'">$([msbuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory), "python_d.exe")</PythonHome>
- <PythonHome Condition="$(PythonHome) == ''">$([msbuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory), "python.exe")</PythonHome>
+ <PythonHome Condition="$(PythonHome) == ''">$([System.IO.Path]::GetFullPath("$(MSBuildThisFileDirectory)\..\..\tools"))</PythonHome>
<PythonInclude>$(PythonHome)\include</PythonInclude>
<PythonLibs>$(PythonHome)\libs</PythonLibs>
<PythonTag>{PYTHON_TAG}</PythonTag>
PyObject *l;
PyObject *ret;
- if ((rc = RegQueryInfoKey(key, NULL, NULL, 0, &nSubKeys, NULL, NULL,
- &nValues, NULL, NULL, NULL, &ft))
+ if ((rc = RegQueryInfoKeyW(key, NULL, NULL, 0, &nSubKeys, NULL, NULL,
+ &nValues, NULL, NULL, NULL, &ft))
!= ERROR_SUCCESS)
return PyErr_SetFromWindowsErrWithFunction(rc, "RegQueryInfoKey");
li.LowPart = ft.dwLowDateTime;
@rem VS 2017 and later provide vswhere.exe, which can be used\r
@if not exist "%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" goto :skip_vswhere\r
@set _Py_MSBuild_Root=\r
-@for /F "tokens=*" %%i in ('"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -property installationPath -latest') DO @(set _Py_MSBuild_Root=%%i\MSBuild)\r
+@for /F "tokens=*" %%i in ('"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -property installationPath -latest -prerelease -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64') DO @(set _Py_MSBuild_Root=%%i\MSBuild)\r
@if not defined _Py_MSBuild_Root goto :skip_vswhere\r
@for %%j in (Current 15.0) DO @if exist "%_Py_MSBuild_Root%\%%j\Bin\msbuild.exe" (set MSBUILD="%_Py_MSBuild_Root%\%%j\Bin\msbuild.exe")\r
@set _Py_MSBuild_Root=\r
@if defined MSBUILD @if exist %MSBUILD% (set _Py_MSBuild_Source=Visual Studio installation) & goto :found\r
:skip_vswhere\r
\r
-@rem VS 2017 sets exactly one install as the "main" install, so we may find MSBuild in there.\r
-@reg query "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\VisualStudio\SxS\VS7" /v 15.0 /reg:32 >nul 2>nul\r
-@if NOT ERRORLEVEL 1 @for /F "tokens=1,2*" %%i in ('reg query "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\VisualStudio\SxS\VS7" /v 15.0 /reg:32') DO @(\r
- @if "%%i"=="15.0" @if exist "%%k\MSBuild\15.0\Bin\msbuild.exe" @(set MSBUILD="%%k\MSBuild\15.0\Bin\msbuild.exe")\r
-)\r
-@if exist %MSBUILD% (set _Py_MSBuild_Source=Visual Studio 2017 registry) & goto :found\r
-\r
@rem VS 2015 and earlier register MSBuild separately, so we can find it.\r
@reg query "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\MSBuild\ToolsVersions\14.0" /v MSBuildToolsPath /reg:32 >nul 2>nul\r
@if NOT ERRORLEVEL 1 @for /F "tokens=1,2*" %%i in ('reg query "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\MSBuild\ToolsVersions\14.0" /v MSBuildToolsPath /reg:32') DO @(\r
\r
set libraries=\r
set libraries=%libraries% bzip2-1.0.6\r
-if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1d\r
+if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1g\r
set libraries=%libraries% sqlite-3.31.1.0\r
if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.9.0\r
if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.9.0\r
echo.Fetching external binaries...\r
\r
set binaries=\r
-if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1d\r
+if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1g\r
if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.9.0\r
if NOT "%IncludeSSLSrc%"=="false" set binaries=%binaries% nasm-2.11.06\r
\r
<Content Include="idlelib\Icons\idle_32.png" />
<Content Include="idlelib\Icons\idle_48.gif" />
<Content Include="idlelib\Icons\idle_48.png" />
+ <Content Include="idlelib\Icons\idle_256.png" />
<Content Include="idlelib\Icons\minusnode.gif" />
<Content Include="idlelib\Icons\openfolder.gif" />
<Content Include="idlelib\Icons\plusnode.gif" />
</PropertyGroup>\r
<ItemDefinitionGroup>\r
<ClCompile>\r
- <PreprocessorDefinitions>WIN32;HAVE_CONFIG_H;_DEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>\r
- <WarningLevel>Level3</WarningLevel>\r
- <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>\r
- <Optimization>Disabled</Optimization>\r
- <AdditionalIncludeDirectories>$(lzmaDir)windows;$(lzmaDir)src/liblzma/common;$(lzmaDir)src/common;$(lzmaDir)src/liblzma/api;$(lzmaDir)src/liblzma/check;$(lzmaDir)src/liblzma/delta;$(lzmaDir)src/liblzma/lz;$(lzmaDir)src/liblzma/lzma;$(lzmaDir)src/liblzma/rangecoder;$(lzmaDir)src/liblzma/simple</AdditionalIncludeDirectories>\r
+ <PreprocessorDefinitions>WIN32;HAVE_CONFIG_H;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>\r
+ <AdditionalIncludeDirectories>$(lzmaDir)windows;$(lzmaDir)src/liblzma/common;$(lzmaDir)src/common;$(lzmaDir)src/liblzma/api;$(lzmaDir)src/liblzma/check;$(lzmaDir)src/liblzma/delta;$(lzmaDir)src/liblzma/lz;$(lzmaDir)src/liblzma/lzma;$(lzmaDir)src/liblzma/rangecoder;$(lzmaDir)src/liblzma/simple;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>\r
<DisableSpecificWarnings>4028;4113;4133;4244;4267;4996;%(DisableSpecificWarnings)</DisableSpecificWarnings>\r
</ClCompile>\r
</ItemDefinitionGroup>\r
<Error Text="Unable to locate signtool.exe. Set /p:SignToolPath and rebuild" Condition="'$(_SignCommand)' == ''" />\r
<Exec Command='$(_SignCommand) "$(TargetPath)" || $(_SignCommand) "$(TargetPath)" || $(_SignCommand) "$(TargetPath)"' ContinueOnError="false" />\r
</Target>\r
-</Project>
\ No newline at end of file
+\r
+\r
+ <Target Name="FindVCRedistDir">\r
+ <!-- Hard coded path for VS 2015 -->\r
+ <PropertyGroup Condition="$(PlatformToolset) == 'v140'">\r
+ <VCRedistDir>$(VCInstallDir)\redist\</VCRedistDir>\r
+ </PropertyGroup>\r
+\r
+ <!-- Search for version number in some broken Build Tools installs -->\r
+ <ItemGroup Condition="$(VCRedistDir) == '' and $(VCToolsRedistVersion) == ''">\r
+ <_RedistFiles Include="$(VCInstallDir)\Redist\MSVC\*\*.*" />\r
+ </ItemGroup>\r
+ <PropertyGroup Condition="$(VCRedistDir) == '' and $(VCToolsRedistVersion) == ''">\r
+ <_RedistDir>%(_RedistFiles.Directory)</_RedistDir>\r
+ <VCToolsRedistVersion>$([System.IO.Path]::GetFileName($(_RedistDir.Trim(`\`))))</VCToolsRedistVersion>\r
+ </PropertyGroup>\r
+\r
+ <!-- Use correct path for VS 2017 and later -->\r
+ <PropertyGroup Condition="$(VCRedistDir) == ''">\r
+ <VCRedistDir>$(VCInstallDir)\Redist\MSVC\$(VCToolsRedistVersion)\</VCRedistDir>\r
+ </PropertyGroup>\r
+\r
+ <PropertyGroup>\r
+ <VCRedistDir Condition="$(Platform) == 'Win32'">$(VCRedistDir)x86\</VCRedistDir>\r
+ <VCRedistDir Condition="$(Platform) != 'Win32'">$(VCRedistDir)$(Platform)\</VCRedistDir>\r
+ </PropertyGroup>\r
+\r
+ <Message Text="VC Redist Directory: $(VCRedistDir)" />\r
+ <Message Text="VC Redist Version: $(VCToolsRedistVersion)" />\r
+ </Target>\r
+\r
+ <Target Name="FindVCRuntime" Returns="VCRuntimeDLL" DependsOnTargets="FindVCRedistDir">\r
+ <ItemGroup Condition="$(VCInstallDir) != ''">\r
+ <VCRuntimeDLL Include="$(VCRedistDir)\Microsoft.VC*.CRT\vcruntime*.dll" />\r
+ </ItemGroup>\r
+\r
+ <Warning Text="vcruntime*.dll not found under $(VCRedistDir)." Condition="@(VCRuntimeDLL) == ''" />\r
+ <Message Text="VC Runtime DLL(s):%0A- @(VCRuntimeDLL,'%0A- ')" />\r
+ </Target>\r
+</Project>\r
<sqlite3Dir>$(ExternalsDir)sqlite-3.31.1.0\</sqlite3Dir>\r
<bz2Dir>$(ExternalsDir)bzip2-1.0.6\</bz2Dir>\r
<lzmaDir>$(ExternalsDir)xz-5.2.2\</lzmaDir>\r
- <opensslDir>$(ExternalsDir)openssl-1.1.1d\</opensslDir>\r
- <opensslOutDir>$(ExternalsDir)openssl-bin-1.1.1d\$(ArchName)\</opensslOutDir>\r
+ <opensslDir>$(ExternalsDir)openssl-1.1.1g\</opensslDir>\r
+ <opensslOutDir>$(ExternalsDir)openssl-bin-1.1.1g\$(ArchName)\</opensslOutDir>\r
<opensslIncludeDir>$(opensslOutDir)include</opensslIncludeDir>\r
<nasmDir>$(ExternalsDir)\nasm-2.11.06\</nasmDir>\r
<zlibDir>$(ExternalsDir)\zlib-1.2.11\</zlibDir>\r
<Warning Text="Not including zlib is not a supported configuration." />\r
</Target>\r
\r
- <PropertyGroup>\r
- <VCRedistDir>$(VCInstallDir)\Redist\MSVC\$(VCToolsRedistVersion)\</VCRedistDir>\r
- <VCRedistDir Condition="$(Platform) == 'Win32'">$(VCRedistDir)x86\</VCRedistDir>\r
- <VCRedistDir Condition="$(Platform) != 'Win32'">$(VCRedistDir)$(Platform)\</VCRedistDir>\r
- </PropertyGroup>\r
- <ItemGroup Condition="$(VCInstallDir) != ''">\r
- <VCRuntimeDLL Include="$(VCRedistDir)\**\vcruntime*.dll" />\r
- </ItemGroup>\r
- <Target Name="_CopyVCRuntime" AfterTargets="Build" Inputs="@(VCRuntimeDLL)" Outputs="$(OutDir)%(Filename)%(Extension)">\r
+ <Target Name="_CopyVCRuntime" AfterTargets="Build" Inputs="@(VCRuntimeDLL)" Outputs="$(OutDir)%(Filename)%(Extension)" DependsOnTargets="FindVCRuntime">\r
<!-- bpo-38597: When we switch to another VCRuntime DLL, include vcruntime140.dll as well -->\r
<Warning Text="A copy of vcruntime140.dll is also required" Condition="!$(VCToolsRedistVersion.StartsWith(`14.`))" />\r
<Copy SourceFiles="%(VCRuntimeDLL.FullPath)" DestinationFolder="$(OutDir)" />\r
<SubSystem>Windows</SubSystem>\r
</Link>\r
</ItemDefinitionGroup>\r
+ <ItemDefinitionGroup Condition="$(Configuration) != 'Debug'">\r
+ <ClCompile>\r
+ <RuntimeLibrary>Multithreaded</RuntimeLibrary>\r
+ </ClCompile>\r
+ <Link>\r
+ <AdditionalDependencies>ucrt.lib;%(AdditionalDependencies)</AdditionalDependencies>\r
+ <IgnoreSpecificDefaultLibraries>libucrt;%(IgnoreSpecificDefaultLibraries)</IgnoreSpecificDefaultLibraries>\r
+ </Link>\r
+ </ItemDefinitionGroup>\r
<ItemGroup>\r
<None Include="..\PC\pyconw.ico" />\r
</ItemGroup>\r
*registry = PyDict_New();
if (*registry == NULL)
- goto handle_error;
+ return 0;
rc = PyDict_SetItemString(globals, "__warningregistry__", *registry);
if (rc < 0)
dangling reference. */
Py_XDECREF(*registry);
Py_XDECREF(*module);
- Py_XDECREF(*filename);
return 0;
}
APPEND_STR_IF(i > 0, ", ");
APPEND(slice, (slice_ty)asdl_seq_GET(slice->v.ExtSlice.dims, i));
}
+ APPEND_STR_IF(dims_count == 1, ",");
+ return 0;
+}
+
+/* Unparse an Index slice node (the `i` in `x[i]`) into `writer`.
+   The index expression is normally written at PR_TUPLE priority; when
+   the index is a tuple containing a starred element the level is
+   bumped by one — presumably so APPEND_EXPR parenthesizes the tuple
+   (NOTE(review): confirm against the PR_* priority table). Returns 0
+   on success, -1 on error (via APPEND_EXPR). */
+static int
+append_ast_index_slice(_PyUnicodeWriter *writer, slice_ty slice)
+{
+    int level = PR_TUPLE;
+    expr_ty value = slice->v.Index.value;
+    if (value->kind == Tuple_kind) {
+        /* Scan for a starred element; one occurrence is enough. */
+        for (Py_ssize_t i = 0; i < asdl_seq_LEN(value->v.Tuple.elts); i++) {
+            expr_ty element = asdl_seq_GET(value->v.Tuple.elts, i);
+            if (element->kind == Starred_kind) {
+                ++level;
+                break;
+            }
+        }
+    }
+    APPEND_EXPR(value, level);
+    return 0;
+}
case ExtSlice_kind:
return append_ast_ext_slice(writer, slice);
case Index_kind:
- APPEND_EXPR(slice->v.Index.value, PR_TUPLE);
- return 0;
+ return append_ast_index_slice(writer, slice);
default:
PyErr_SetString(PyExc_SystemError,
"unexpected slice kind");
win32_urandom_init(int raise)
{
/* Acquire context */
- if (!CryptAcquireContext(&hCryptProv, NULL, NULL,
- PROV_RSA_FULL, CRYPT_VERIFYCONTEXT))
+ if (!CryptAcquireContextW(&hCryptProv, NULL, NULL,
+ PROV_RSA_FULL, CRYPT_VERIFYCONTEXT))
goto error;
return 0;
static void
remove_module(PyObject *name)
{
+ PyObject *type, *value, *traceback;
+ PyErr_Fetch(&type, &value, &traceback);
PyObject *modules = PyImport_GetModuleDict();
+ if (!PyMapping_HasKey(modules, name)) {
+ goto out;
+ }
if (PyMapping_DelItem(modules, name) < 0) {
- if (!PyMapping_HasKey(modules, name)) {
- return;
- }
Py_FatalError("import: deleting existing key in "
"sys.modules failed");
}
+out:
+ PyErr_Restore(type, value, traceback);
}
PyObject *
PyImport_ReloadModule(PyObject *m)
{
- _Py_IDENTIFIER(imp);
+ _Py_IDENTIFIER(importlib);
_Py_IDENTIFIER(reload);
PyObject *reloaded_module = NULL;
- PyObject *imp = _PyImport_GetModuleId(&PyId_imp);
- if (imp == NULL) {
+ PyObject *importlib = _PyImport_GetModuleId(&PyId_importlib);
+ if (importlib == NULL) {
if (PyErr_Occurred()) {
return NULL;
}
- imp = PyImport_ImportModule("imp");
- if (imp == NULL) {
+ importlib = PyImport_ImportModule("importlib");
+ if (importlib == NULL) {
return NULL;
}
}
- reloaded_module = _PyObject_CallMethodIdObjArgs(imp, &PyId_reload, m, NULL);
- Py_DECREF(imp);
+ reloaded_module = _PyObject_CallMethodIdObjArgs(importlib, &PyId_reload, m, NULL);
+ Py_DECREF(importlib);
return reloaded_module;
}
void
PyThreadState_Clear(PyThreadState *tstate)
{
- if (Py_VerboseFlag && tstate->frame != NULL)
+ if (Py_VerboseFlag && tstate->frame != NULL) {
+ /* bpo-20526: After the main thread calls
+ _PyRuntimeState_SetFinalizing() in Py_FinalizeEx(), threads must
       exit when trying to take the GIL. If a thread exits in the middle of
+ _PyEval_EvalFrameDefault(), tstate->frame is not reset to its
+ previous value. It is more likely with daemon threads, but it can
+ happen with regular threads if threading._shutdown() fails
+ (ex: interrupted by CTRL+C). */
fprintf(stderr,
"PyThreadState_Clear: warning: thread still has a frame\n");
+ }
- Py_CLEAR(tstate->frame);
+ /* Don't clear tstate->frame: it is a borrowed reference */
Py_CLEAR(tstate->dict);
Py_CLEAR(tstate->async_exc);
#include "Python.h"
#ifdef MS_WINDOWS
-#include <windows.h>
+#include <winsock2.h> /* struct timeval */
#endif
#if defined(__APPLE__)
}
} else if (milliseconds != 0) {
/* wait at least until the target */
- DWORD now, target = GetTickCount() + milliseconds;
+ ULONGLONG now, target = GetTickCount64() + milliseconds;
while (mutex->locked) {
if (PyCOND_TIMEDWAIT(&mutex->cv, &mutex->cs, (long long)milliseconds*1000) < 0) {
result = WAIT_FAILED;
break;
}
- now = GetTickCount();
+ now = GetTickCount64();
if (target <= now)
break;
- milliseconds = target-now;
+ milliseconds = (DWORD)(target-now);
}
}
if (!mutex->locked) {
-This is Python version 3.7.7
+This is Python version 3.7.8
============================
.. image:: https://travis-ci.org/python/cpython.svg?branch=3.7
token = fields[0].lower()
condition = ' '.join(fields[1:]).strip()
- if_tokens = {'if', 'ifdef', 'ifndef'}
- all_tokens = if_tokens | {'elif', 'else', 'endif'}
-
- if token not in all_tokens:
- return
-
- # cheat a little here, to reuse the implementation of if
- if token == 'elif':
- pop_stack()
- token = 'if'
-
- if token in if_tokens:
+ if token in {'if', 'ifdef', 'ifndef', 'elif'}:
if not condition:
self.fail("Invalid format for #" + token + " line: no argument!")
- if token == 'if':
+ if token in {'if', 'elif'}:
if not self.is_a_simple_defined(condition):
condition = "(" + condition + ")"
+ if token == 'elif':
+ previous_token, previous_condition = pop_stack()
+ self.stack.append((previous_token, negate(previous_condition)))
else:
fields = condition.split()
if len(fields) != 1:
condition = 'defined(' + symbol + ')'
if token == 'ifndef':
condition = '!' + condition
+ token = 'if'
- self.stack.append(("if", condition))
- if self.verbose:
- print(self.status())
- return
+ self.stack.append((token, condition))
- previous_token, previous_condition = pop_stack()
+ elif token == 'else':
+ previous_token, previous_condition = pop_stack()
+ self.stack.append((previous_token, negate(previous_condition)))
- if token == 'else':
- self.stack.append(('else', negate(previous_condition)))
elif token == 'endif':
- pass
+ while pop_stack()[0] != 'if':
+ pass
+
+ else:
+ return
+
if self.verbose:
print(self.status())
ENCODING = locale.getpreferredencoding()
+FRAME_INFO_OPTIMIZED_OUT = '(frame information optimized out)'
+UNABLE_READ_INFO_PYTHON_FRAME = 'Unable to read information on python frame'
EVALFRAME = '_PyEval_EvalFrameDefault'
class NullPyObjectPtr(RuntimeError):
def filename(self):
'''Get the path of the current Python source file, as a string'''
if self.is_optimized_out():
- return '(frame information optimized out)'
+ return FRAME_INFO_OPTIMIZED_OUT
return self.co_filename.proxyval(set())
def current_line_num(self):
'''Get the text of the current source line as a string, with a trailing
newline character'''
if self.is_optimized_out():
- return '(frame information optimized out)'
+ return FRAME_INFO_OPTIMIZED_OUT
lineno = self.current_line_num()
if lineno is None:
def write_repr(self, out, visited):
if self.is_optimized_out():
- out.write('(frame information optimized out)')
+ out.write(FRAME_INFO_OPTIMIZED_OUT)
return
lineno = self.current_line_num()
lineno = str(lineno) if lineno is not None else "?"
def print_traceback(self):
if self.is_optimized_out():
- sys.stdout.write(' (frame information optimized out)\n')
+ sys.stdout.write(' %s\n' % FRAME_INFO_OPTIMIZED_OUT)
return
visited = set()
lineno = self.current_line_num()
# This assumes the _POSIX_THREADS version of Python/ceval_gil.h:
name = self._gdbframe.name()
if name:
- return 'pthread_cond_timedwait' in name
+ return (name == 'take_gil')
def is_gc_collect(self):
'''Is this frame "collect" within the garbage-collector?'''
pyop = frame.get_pyop()
if not pyop or pyop.is_optimized_out():
- print('Unable to read information on python frame')
+ print(UNABLE_READ_INFO_PYTHON_FRAME)
return
filename = pyop.filename()
pyop_frame = frame.get_pyop()
if not pyop_frame:
- print('Unable to read information on python frame')
+ print(UNABLE_READ_INFO_PYTHON_FRAME)
return
pyop_var, scope = pyop_frame.get_var_by_name(name)
pyop_frame = frame.get_pyop()
if not pyop_frame:
- print('Unable to read information on python frame')
+ print(UNABLE_READ_INFO_PYTHON_FRAME)
return
for pyop_name, pyop_value in pyop_frame.iter_locals():
<SuppressICEs>ICE43</SuppressICEs>
</PropertyGroup>
<Import Project="..\msi.props" />
+ <PropertyGroup Condition="exists('$(BuildPath)vcruntime140_1.dll')">
+ <DefineConstants>$(DefineConstants);Include_Vcruntime140_1_dll=1</DefineConstants>
+ </PropertyGroup>
<ItemGroup>
<Compile Include="exe.wxs" />
<Compile Include="exe_files.wxs" />
</RegistryKey>
</Component>
<Component Id="vcruntime140.dll" Directory="InstallDirectory" Guid="*">
- <File Name="vcruntime140.dll" Source="!(bindpath.redist)vcruntime140.dll" KeyPath="yes" />
+ <File Name="vcruntime140.dll" Source="vcruntime140.dll" KeyPath="yes" />
</Component>
+<?ifdef Include_Vcruntime140_1_dll ?>
+ <Component Id="vcruntime140_1.dll" Directory="InstallDirectory" Guid="*">
+ <File Name="vcruntime140_1.dll" Source="vcruntime140_1.dll" KeyPath="yes" />
+ </Component>
+<?endif ?>
</ComponentGroup>
</Fragment>
<RegistryValue KeyPath="yes" Root="HKMU" Key="Software\Python\PyLauncher" Name="AssociateFiles" Value="1" Type="integer" />
<ProgId Id="Python.File" Description="!(loc.PythonFileDescription)" Advertise="no" Icon="py.exe" IconIndex="1">
- <Extension Id="py" ContentType="text/plain">
+ <Extension Id="py" ContentType="text/x-python">
<Verb Id="open" TargetFile="py.exe" Argument=""%L" %*" />
</Extension>
</ProgId>
<RegistryValue Root="HKCR" Key="Python.File\shellex\DropHandler" Value="{BEA218D2-6950-497B-9434-61683EC065FE}" Type="string" />
<ProgId Id="Python.NoConFile" Description="!(loc.PythonNoConFileDescription)" Advertise="no" Icon="py.exe" IconIndex="1">
- <Extension Id="pyw" ContentType="text/plain">
+ <Extension Id="pyw" ContentType="text/x-python">
<Verb Id="open" TargetFile="pyw.exe" Argument=""%L" %*" />
</Extension>
</ProgId>
def _Subscript(self, t):
self.dispatch(t.value)
self.write("[")
- self.dispatch(t.slice)
+ if (isinstance(t.slice, ast.Index)
+ and isinstance(t.slice.value, ast.Tuple)
+ and t.slice.value.elts):
+ if len(t.slice.value.elts) == 1:
+ elt = t.slice.value.elts[0]
+ self.dispatch(elt)
+ self.write(",")
+ else:
+ interleave(lambda: self.write(", "), self.dispatch, t.slice.value.elts)
+ else:
+ self.dispatch(t.slice)
self.write("]")
def _Starred(self, t):
self.dispatch(t.step)
def _ExtSlice(self, t):
- interleave(lambda: self.write(', '), self.dispatch, t.dims)
+ if len(t.dims) == 1:
+ elt = t.dims[0]
+ self.dispatch(elt)
+ self.write(",")
+ else:
+ interleave(lambda: self.write(', '), self.dispatch, t.dims)
# argument
def _arg(self, t):
import os
try:
from urllib.request import urlopen
+ from urllib.error import HTTPError
except ImportError:
- from urllib2 import urlopen
-import subprocess
+ from urllib2 import urlopen, HTTPError
import shutil
+import string
+import subprocess
import sys
import tarfile
log = logging.getLogger("multissl")
OPENSSL_OLD_VERSIONS = [
- "1.0.2",
+ "1.0.2u",
+ "1.1.0l",
]
OPENSSL_RECENT_VERSIONS = [
- "1.0.2t",
- "1.1.0l",
- "1.1.1d",
+ "1.1.1g",
+ # "3.0.0-alpha2"
]
LIBRESSL_OLD_VERSIONS = [
+ "2.9.2",
]
LIBRESSL_RECENT_VERSIONS = [
- "2.9.2",
+ "3.1.0",
]
# store files in ../multissl
parser.add_argument(
'--disable-ancient',
action='store_true',
- help="Don't test OpenSSL < 1.0.2 and LibreSSL < 2.5.3.",
+ help="Don't test OpenSSL and LibreSSL versions without upstream support",
)
parser.add_argument(
'--openssl',
help="Keep original sources for debugging."
)
+OPENSSL_FIPS_CNF = """\
+openssl_conf = openssl_init
+
+.include {self.install_dir}/ssl/fipsinstall.cnf
+# .include {self.install_dir}/ssl/openssl.cnf
+
+[openssl_init]
+providers = provider_sect
+
+[provider_sect]
+fips = fips_sect
+default = default_sect
+
+[default_sect]
+activate = 1
+"""
+
class AbstractBuilder(object):
library = None
- url_template = None
+ url_templates = None
src_template = None
build_template = None
install_target = 'install'
return hash((self.library, self.version))
@property
+ def short_version(self):
+ """Short version for OpenSSL download URL"""
+ return None
+
+ @property
def openssl_cli(self):
"""openssl CLI binary"""
return os.path.join(self.install_dir, "bin", "openssl")
src_dir = os.path.dirname(self.src_file)
if not os.path.isdir(src_dir):
os.makedirs(src_dir)
- url = self.url_template.format(self.version)
- log.info("Downloading from {}".format(url))
- req = urlopen(url)
- # KISS, read all, write all
- data = req.read()
+ data = None
+ for url_template in self.url_templates:
+ url = url_template.format(v=self.version, s=self.short_version)
+ log.info("Downloading from {}".format(url))
+ try:
+ req = urlopen(url)
+ # KISS, read all, write all
+ data = req.read()
+ except HTTPError as e:
+ log.error(
+                    "Download from {} has failed: {}".format(url, e)
+ )
+ else:
+ log.info("Successfully downloaded from {}".format(url))
+ break
+ if data is None:
+ raise ValueError("All download URLs have failed")
log.info("Storing {}".format(self.src_file))
with open(self.src_file, "wb") as f:
f.write(data)
"shared", "--debug",
"--prefix={}".format(self.install_dir)
]
+ # cmd.extend(["no-deprecated", "--api=1.1.0"])
env = os.environ.copy()
# set rpath
env["LD_RUN_PATH"] = self.lib_dir
["make", "-j1", self.install_target],
cwd=self.build_dir
)
+ self._post_install()
if not self.args.keep_sources:
shutil.rmtree(self.build_dir)
+ def _post_install(self):
+ pass
+
def install(self):
log.info(self.openssl_cli)
if not self.has_openssl or self.args.force:
class BuildOpenSSL(AbstractBuilder):
library = "OpenSSL"
- url_template = "https://www.openssl.org/source/openssl-{}.tar.gz"
+ url_templates = (
+ "https://www.openssl.org/source/openssl-{v}.tar.gz",
+ "https://www.openssl.org/source/old/{s}/openssl-{v}.tar.gz"
+ )
src_template = "openssl-{}.tar.gz"
build_template = "openssl-{}"
# only install software, skip docs
install_target = 'install_sw'
+ def _post_install(self):
+ if self.version.startswith("3.0"):
+ self._post_install_300()
+
+ def _post_install_300(self):
+ # create ssl/ subdir with example configs
+ self._subprocess_call(
+ ["make", "-j1", "install_ssldirs"],
+ cwd=self.build_dir
+ )
+ # Install FIPS module
+ # https://wiki.openssl.org/index.php/OpenSSL_3.0#Completing_the_installation_of_the_FIPS_Module
+ fipsinstall_cnf = os.path.join(
+ self.install_dir, "ssl", "fipsinstall.cnf"
+ )
+ openssl_fips_cnf = os.path.join(
+ self.install_dir, "ssl", "openssl-fips.cnf"
+ )
+ fips_mod = os.path.join(self.lib_dir, "ossl-modules/fips.so")
+ self._subprocess_call(
+ [
+ self.openssl_cli, "fipsinstall",
+ "-out", fipsinstall_cnf,
+ "-module", fips_mod,
+ "-provider_name", "fips",
+ "-mac_name", "HMAC",
+ "-macopt", "digest:SHA256",
+ "-macopt", "hexkey:00",
+ "-section_name", "fips_sect"
+ ]
+ )
+ with open(openssl_fips_cnf, "w") as f:
+ f.write(OPENSSL_FIPS_CNF.format(self=self))
+ @property
+ def short_version(self):
+ """Short version for OpenSSL download URL"""
+ short_version = self.version.rstrip(string.ascii_letters)
+ if short_version.startswith("0.9"):
+ short_version = "0.9.x"
+ return short_version
+
class BuildLibreSSL(AbstractBuilder):
library = "LibreSSL"
- url_template = (
- "https://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-{}.tar.gz")
+ url_templates = (
+ "https://ftp.openbsd.org/pub/OpenBSD/LibreSSL/libressl-{v}.tar.gz",
+ )
src_template = "libressl-{}.tar.gz"
build_template = "libressl-{}"
# has no effect, don't bother defining them
Darwin/[6789].*)
define_xopen_source=no;;
- Darwin/1[0-9].*)
+ Darwin/[12][0-9].*)
define_xopen_source=no;;
# On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but
# used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined
# has no effect, don't bother defining them
Darwin/@<:@6789@:>@.*)
define_xopen_source=no;;
- Darwin/1@<:@0-9@:>@.*)
+  Darwin/@<:@12@:>@@<:@0-9@:>@.*)
define_xopen_source=no;;
# On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but
# used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined
return MACOS_SDK_ROOT
cflags = sysconfig.get_config_var('CFLAGS')
- m = re.search(r'-isysroot\s+(\S+)', cflags)
+ m = re.search(r'-isysroot\s*(\S+)', cflags)
if m is not None:
MACOS_SDK_ROOT = m.group(1)
else: