--- /dev/null
+root = true
+
+[*.{py,c,cpp,h,rst,md,yml}]
+trim_trailing_whitespace = true
+insert_final_newline = true
+indent_style = space
+
+[*.{py,c,cpp,h}]
+indent_size = 4
+
+[*.yml]
+indent_size = 2
htmlview: html
$(PYTHON) -c "import webbrowser; webbrowser.open('build/html/index.html')"
-clean:
- -rm -rf build/* $(VENVDIR)/*
+clean: clean-venv
+ -rm -rf build/*
+
+clean-venv:
+ rm -rf $(VENVDIR)
venv:
- $(PYTHON) -m venv $(VENVDIR)
- $(VENVDIR)/bin/python3 -m pip install -U pip setuptools
- $(VENVDIR)/bin/python3 -m pip install -r requirements.txt
- @echo "The venv has been created in the $(VENVDIR) directory"
+ @if [ -d $(VENVDIR) ] ; then \
+ echo "venv already exists."; \
+ echo "To recreate it, remove it first with \`make clean-venv'."; \
+ else \
+ $(PYTHON) -m venv $(VENVDIR); \
+ $(VENVDIR)/bin/python3 -m pip install -U pip setuptools; \
+ $(VENVDIR)/bin/python3 -m pip install -r requirements.txt; \
+ echo "The venv has been created in the $(VENVDIR) directory"; \
+ fi
dist:
rm -rf dist
Using make
----------
-To get started on UNIX, you can create a virtual environment with the command ::
+To get started on UNIX, you can create a virtual environment and build
+documentation with the commands::
make venv
-
-That will install all the tools necessary to build the documentation. Assuming
-the virtual environment was created in the ``venv`` directory (the default;
-configurable with the VENVDIR variable), you can run the following command to
-build the HTML output files::
-
make html
-By default, if the virtual environment is not created, the Makefile will
-look for instances of sphinxbuild and blurb installed on your process PATH
-(configurable with the SPHINXBUILD and BLURB variables).
+The virtual environment in the ``venv`` directory will contain all the tools
+necessary to build the documentation, downloaded and installed from PyPI.
+If you'd like to create the virtual environment in a different location,
+you can specify it using the ``VENVDIR`` variable.
+
+You can also skip creating the virtual environment altogether, in which case
+the Makefile will look for instances of ``sphinx-build`` and ``blurb``
+installed on your process ``PATH`` (configurable with the ``SPHINXBUILD`` and
+``BLURB`` variables).
On Windows, we try to emulate the Makefile as closely as possible with a
``make.bat`` file. If you need to specify the Python interpreter to use,
-set the PYTHON environment variable instead.
+set the PYTHON environment variable.
Available make targets are:
-* "clean", which removes all build files.
+* "clean", which removes all build files and the virtual environment.
+
+* "clean-venv", which removes the virtual environment directory.
* "venv", which creates a virtual environment with all necessary tools
installed.
Article which goes into some detail about how to create a useful bug report.
This describes what kind of information is useful and why it is useful.
- `Bug Report Writing Guidelines <https://developer.mozilla.org/en-US/docs/Mozilla/QA/Bug_writing_guidelines>`_
+ `Bug Writing Guidelines <https://bugzilla.mozilla.org/page.cgi?id=bug-writing.html>`_
Information about writing a good bug report. Some of this is specific to the
Mozilla project, but describes general good practices.
This is the most common way to set the error indicator. The first argument
specifies the exception type; it is normally one of the standard exceptions,
e.g. :c:data:`PyExc_RuntimeError`. You need not increment its reference count.
- The second argument is an error message; it is decoded from ``'utf-8``'.
+ The second argument is an error message; it is decoded from ``'utf-8'``.
.. c:function:: void PyErr_SetObject(PyObject *type, PyObject *value)
than 0 and the module state (as returned by :c:func:`PyModule_GetState`)
is ``NULL``.
+ Like :c:member:`PyTypeObject.tp_clear`, this function is not *always*
+ called before a module is deallocated. For example, when reference
+ counting is enough to determine that an object is no longer used,
+ the cyclic garbage collector is not involved and
+ :c:member:`~PyModuleDef.m_free` is called directly.
+
.. versionchanged:: 3.9
No longer called before the module state is allocated.
{NULL} /* Sentinel */
};
+
+.. c:function:: PyObject* PyMember_GetOne(const char *obj_addr, struct PyMemberDef *m)
+
+ Get an attribute belonging to the object at address *obj_addr*. The
+ attribute is described by ``PyMemberDef`` *m*. Returns ``NULL``
+ on error.
+
+
+.. c:function:: int PyMember_SetOne(char *obj_addr, struct PyMemberDef *m, PyObject *o)
+
+ Set an attribute belonging to the object at address *obj_addr* to object *o*.
+ The attribute to set is described by ``PyMemberDef`` *m*. Returns ``0``
+ if successful and a negative value on failure.
+
+
.. c:type:: PyGetSetDef
Structure to define property-like access for a type. See also description of
The C structure of the objects used to describe built-in types.
-.. c:var:: PyObject* PyType_Type
+.. c:var:: PyTypeObject PyType_Type
This is the type object for type objects; it is the same object as
:class:`type` in the Python layer.
so that *self* knows the contained object can no longer be used. The
:c:func:`Py_CLEAR` macro performs the operations in a safe order.
+ Note that :c:member:`~PyTypeObject.tp_clear` is not *always* called
+ before an instance is deallocated. For example, when reference counting
+ is enough to determine that an object is no longer used, the cyclic garbage
+ collector is not involved and :c:member:`~PyTypeObject.tp_dealloc` is
+ called directly.
+
Because the goal of :c:member:`~PyTypeObject.tp_clear` functions is to break reference cycles,
it's not necessary to clear contained objects like Python strings or Python
integers, which can't participate in reference cycles. On the other hand, it may
``PyUnicode_WCHAR_KIND`` is deprecated.
-.. c:function:: int PyUnicode_KIND(PyObject *o)
+.. c:function:: unsigned int PyUnicode_KIND(PyObject *o)
Return one of the PyUnicode kind constants (see above) that indicate how many
bytes per character this Unicode object uses to store its data. *o* has to
# Custom sidebar templates, filenames relative to this file.
html_sidebars = {
- # Defaults taken from http://www.sphinx-doc.org/en/stable/config.html#confval-html_sidebars
+ # Defaults taken from https://www.sphinx-doc.org/en/master/usage/configuration.html#confval-html_sidebars
# Removes the quick search block
'**': ['localtoc.html', 'relations.html', 'customsourcelink.html'],
'index': ['indexsidebar.html'],
* `The .pypirc file`_
.. _Project structure: \
- https://packaging.python.org/tutorials/distributing-packages/
+ https://packaging.python.org/tutorials/packaging-projects/#packaging-python-projects
.. _Building and packaging the project: \
- https://packaging.python.org/tutorials/distributing-packages/#packaging-your-project
+ https://packaging.python.org/tutorials/packaging-projects/#creating-the-package-files
.. _Uploading the project to the Python Packaging Index: \
- https://packaging.python.org/tutorials/distributing-packages/#uploading-your-project-to-pypi
+ https://packaging.python.org/tutorials/packaging-projects/#uploading-the-distribution-archives
.. _The .pypirc file: \
https://packaging.python.org/specifications/pypirc/
General GUI Questions
=====================
-What platform-independent GUI toolkits exist for Python?
-========================================================
-
-Depending on what platform(s) you are aiming at, there are several. Some
-of them haven't been ported to Python 3 yet. At least `Tkinter`_ and `Qt`_
-are known to be Python 3-compatible.
-
-.. XXX check links
-
-Tkinter
--------
+What GUI toolkits exist for Python?
+===================================
Standard builds of Python include an object-oriented interface to the Tcl/Tk
widget set, called :ref:`tkinter <Tkinter>`. This is probably the easiest to
`Tcl/Tk home page <https://www.tcl.tk>`_. Tcl/Tk is fully portable to the
Mac OS X, Windows, and Unix platforms.
-wxWidgets
----------
-
-wxWidgets (https://www.wxwidgets.org) is a free, portable GUI class
-library written in C++ that provides a native look and feel on a
-number of platforms, with Windows, Mac OS X, GTK, X11, all listed as
-current stable targets. Language bindings are available for a number
-of languages including Python, Perl, Ruby, etc.
-
-`wxPython <https://www.wxpython.org>`_ is the Python binding for
-wxwidgets. While it often lags slightly behind the official wxWidgets
-releases, it also offers a number of features via pure Python
-extensions that are not available in other language bindings. There
-is an active wxPython user and developer community.
-
-Both wxWidgets and wxPython are free, open source, software with
-permissive licences that allow their use in commercial products as
-well as in freeware or shareware.
-
-
-Qt
----
-
-There are bindings available for the Qt toolkit (using either `PyQt
-<https://riverbankcomputing.com/software/pyqt/intro>`_ or `PySide
-<https://wiki.qt.io/PySide>`_) and for KDE (`PyKDE4 <https://techbase.kde.org/Languages/Python/Using_PyKDE_4>`__).
-PyQt is currently more mature than PySide, but you must buy a PyQt license from
-`Riverbank Computing <https://www.riverbankcomputing.com/commercial/license-faq>`_
-if you want to write proprietary applications. PySide is free for all applications.
-
-Qt 4.5 upwards is licensed under the LGPL license; also, commercial licenses
-are available from `The Qt Company <https://www.qt.io/licensing/>`_.
-
-Gtk+
-----
-
-The `GObject introspection bindings <https://wiki.gnome.org/Projects/PyGObject>`_
-for Python allow you to write GTK+ 3 applications. There is also a
-`Python GTK+ 3 Tutorial <https://python-gtk-3-tutorial.readthedocs.io>`_.
-
-The older PyGtk bindings for the `Gtk+ 2 toolkit <https://www.gtk.org>`_ have
-been implemented by James Henstridge; see <http://www.pygtk.org>.
-
-Kivy
-----
-
-`Kivy <https://kivy.org/>`_ is a cross-platform GUI library supporting both
-desktop operating systems (Windows, macOS, Linux) and mobile devices (Android,
-iOS). It is written in Python and Cython, and can use a range of windowing
-backends.
-
-Kivy is free and open source software distributed under the MIT license.
-
-FLTK
-----
-
-Python bindings for `the FLTK toolkit <http://www.fltk.org>`_, a simple yet
-powerful and mature cross-platform windowing system, are available from `the
-PyFLTK project <https://pyfltk.sourceforge.io/>`_.
-
-OpenGL
-------
-
-For OpenGL bindings, see `PyOpenGL <http://pyopengl.sourceforge.net>`_.
-
-
-What platform-specific GUI toolkits exist for Python?
-========================================================
-
-By installing the `PyObjc Objective-C bridge
-<https://pypi.org/project/pyobjc/>`_, Python programs can use Mac OS X's
-Cocoa libraries.
-
-:ref:`Pythonwin <windows-faq>` by Mark Hammond includes an interface to the
-Microsoft Foundation Classes and a Python programming environment
-that's written mostly in Python using the MFC classes.
-
+Depending on what platform(s) you are aiming at, there are also several
+alternatives. A `list of cross-platform
+<https://wiki.python.org/moin/GuiProgramming#Cross-Platform_Frameworks>`_ and
+`platform-specific
+<https://wiki.python.org/moin/GuiProgramming#Platform-specific_Frameworks>`_ GUI
+frameworks can be found on the Python wiki.
Tkinter questions
=================
return False
+How can a subclass control what data is stored in an immutable instance?
+------------------------------------------------------------------------
+
+When subclassing an immutable type, override the :meth:`__new__` method
+instead of the :meth:`__init__` method. The latter only runs *after* an
+instance is created, which is too late to alter data in an immutable
+instance.
+
+All of these immutable classes have a different signature than their
+parent class:
+
+.. testcode::
+
+ from datetime import date
+
+ class FirstOfMonthDate(date):
+ "Always choose the first day of the month"
+ def __new__(cls, year, month, day):
+ return super().__new__(cls, year, month, 1)
+
+ class NamedInt(int):
+ "Allow text names for some numbers"
+ xlat = {'zero': 0, 'one': 1, 'ten': 10}
+ def __new__(cls, value):
+ value = cls.xlat.get(value, value)
+ return super().__new__(cls, value)
+
+ class TitleStr(str):
+ "Convert str to name suitable for a URL path"
+ def __new__(cls, s):
+ s = s.lower().replace(' ', '-')
+ s = ''.join([c for c in s if c.isalnum() or c == '-'])
+ return super().__new__(cls, s)
+
+The classes can be used like this:
+
+.. doctest::
+
+ >>> FirstOfMonthDate(2012, 2, 14)
+ FirstOfMonthDate(2012, 2, 1)
+ >>> NamedInt('ten')
+ 10
+ >>> NamedInt(20)
+ 20
+ >>> TitleStr('Blog: Why Python Rocks')
+ 'blog-why-python-rocks'
+
+
Modules
=======
which describe this functionality.
__future__
- A pseudo-module which programmers can use to enable new language features
- which are not compatible with the current interpreter.
-
- By importing the :mod:`__future__` module and evaluating its variables,
- you can see when a new feature was first added to the language and when it
- becomes the default::
+ A :ref:`future statement <future>`, ``from __future__ import <feature>``,
+ directs the compiler to compile the current module using syntax or
+ semantics that will become standard in a future release of Python.
+ The :mod:`__future__` module documents the possible values of
+ *feature*. By importing this module and evaluating its variables,
+ you can see when a new feature was first added to the language and
+ when it will (or did) become the default::
>>> import __future__
>>> __future__.division
if __name__=='__main__':
main()
+
+
+.. _patterns-to-avoid:
+
+Patterns to avoid
+-----------------
+
+Although the preceding sections have described ways of doing things you might
+need to do or deal with, it is worth mentioning some usage patterns which are
+*unhelpful*, and which should therefore be avoided in most cases. The following
+sections are in no particular order.
+
+
+Opening the same log file multiple times
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+On Windows, you will generally not be able to open the same file multiple times
+as this will lead to a "file is in use by another process" error. However, on
+POSIX platforms you'll not get any errors if you open the same file multiple
+times. This could be done accidentally, for example by:
+
+* Adding a file handler more than once which references the same file (e.g. by
+ a copy/paste/forget-to-change error).
+
+* Opening two files that look different, as they have different names, but are
+ the same because one is a symbolic link to the other.
+
+* Forking a process, following which both parent and child have a reference to
+ the same file. This might be through use of the :mod:`multiprocessing` module,
+ for example.
+
+Opening a file multiple times might *appear* to work most of the time, but can
+lead to a number of problems in practice:
+
+* Logging output can be garbled because multiple threads or processes try to
+ write to the same file. Although logging guards against concurrent use of the
+ same handler instance by multiple threads, there is no such protection if
+ concurrent writes are attempted by two different threads using two different
+ handler instances which happen to point to the same file.
+
+* An attempt to delete a file (e.g. during file rotation) silently fails,
+ because there is another reference pointing to it. This can lead to confusion
+ and wasted debugging time - log entries end up in unexpected places, or are
+ lost altogether.
+
+Use the techniques outlined in :ref:`multiple-processes` to circumvent such
+issues.
+
+Using loggers as attributes in a class or passing them as parameters
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+While there might be unusual cases where you'll need to do this, in general
+there is no point because loggers are singletons. Code can always access a
+given logger instance by name using ``logging.getLogger(name)``, so passing
+instances around and holding them as instance attributes is pointless. Note
+that in other languages such as Java and C#, loggers are often static class
+attributes. However, this pattern doesn't make sense in Python, where the
+module (and not the class) is the unit of software decomposition.
+
+
+Adding handlers other than :class:`NullHandler` to a logger in a library
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Configuring logging by adding handlers, formatters and filters is the
+responsibility of the application developer, not the library developer. If you
+are maintaining a library, ensure that you don't add handlers to any of your
+loggers other than a :class:`~logging.NullHandler` instance.
+
+
+Creating a lot of loggers
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Loggers are singletons that are never freed during a script execution, and so
+creating lots of loggers will use up memory which can't then be freed. Rather
+than create a logger per e.g. file processed or network connection made, use
+the :ref:`existing mechanisms <context-info>` for passing contextual
+information into your logs and restrict the loggers created to those describing
+areas within your application (generally modules, but occasionally slightly
+more fine-grained than that).
came into existence, you can read Nick Coghlan's `Python 3 Q & A`_ or
Brett Cannon's `Why Python 3 exists`_.
- For help with porting, you can email the python-porting_ mailing list with
- questions.
+
+ For help with porting, you can view the archived python-porting_ mailing list.
The Short Explanation
=====================
.. _pytype: https://github.com/google/pytype
.. _python-future: http://python-future.org/
-.. _python-porting: https://mail.python.org/mailman/listinfo/python-porting
+.. _python-porting: https://mail.python.org/pipermail/python-porting/
.. _six: https://pypi.org/project/six
.. _tox: https://pypi.org/project/tox
.. _trove classifier: https://pypi.org/classifiers
>>> parser.parse_args(['--no-foo'])
Namespace(foo=False)
+.. versionadded:: 3.9
+
The recommended way to create a custom action is to extend :class:`Action`,
overriding the ``__call__`` method and optionally the ``__init__`` and
``format_usage`` methods.
A function definition.
* ``name`` is a raw string of the function name.
- * ``args`` is a :class:`arguments` node.
+ * ``args`` is an :class:`arguments` node.
* ``body`` is the list of nodes inside the function.
* ``decorator_list`` is the list of decorators to be applied, stored outermost
first (i.e. the first in the list will be applied last).
* ``bases`` is a list of nodes for explicitly specified base classes.
* ``keywords`` is a list of :class:`keyword` nodes, principally for 'metaclass'.
Other keywords will be passed to the metaclass, as per `PEP-3115
- <http://www.python.org/dev/peps/pep-3115/>`_.
+ <https://www.python.org/dev/peps/pep-3115/>`_.
* ``starargs`` and ``kwargs`` are each a single node, as in a function call.
starargs will be expanded to join the list of base classes, and kwargs will
be passed to the metaclass.
A thread-safe variant of :meth:`call_soon`. Must be used to
schedule callbacks *from another thread*.
+ Raises :exc:`RuntimeError` if called on a loop that's been closed.
+ This can happen on a secondary thread when the main application is
+ shutting down.
+
See the :ref:`concurrency and multithreading <asyncio-multithreading>`
section of the documentation.
.. function:: unregister(func)
- Remove *func* from the list of functions to be run at interpreter
- shutdown. After calling :func:`unregister`, *func* is guaranteed not to be
- called when the interpreter shuts down, even if it was registered more than
- once. :func:`unregister` silently does nothing if *func* was not previously
- registered.
+ Remove *func* from the list of functions to be run at interpreter shutdown.
+ :func:`unregister` silently does nothing if *func* was not previously
+ registered. If *func* has been registered more than once, every occurrence
+ of that function in the :mod:`atexit` call stack will be removed. Equality
+ comparisons (``==``) are used internally during unregistration, so function
+ references do not need to have matching identities.
.. seealso::
... content = f.read()
>>> content == data # Check equality to original object after round-trip
True
+
+.. testcleanup::
+
+ import os
+ os.remove("myfile.bz2")
self.move_to_end(key)
An :class:`OrderedDict` would also be useful for implementing
-variants of :func:`functools.lru_cache`::
+variants of :func:`functools.lru_cache`:
- class LRU(OrderedDict):
- 'Limit size, evicting the least recently looked-up key when full'
+.. testcode::
- def __init__(self, maxsize=128, /, *args, **kwds):
- self.maxsize = maxsize
- super().__init__(*args, **kwds)
+ class LRU:
- def __getitem__(self, key):
- value = super().__getitem__(key)
- self.move_to_end(key)
+ def __init__(self, func, maxsize=128):
+ self.func = func
+ self.maxsize = maxsize
+ self.cache = OrderedDict()
+
+ def __call__(self, *args):
+ if args in self.cache:
+ value = self.cache[args]
+ self.cache.move_to_end(args)
+ return value
+ value = self.func(*args)
+ if len(self.cache) >= self.maxsize:
+ self.cache.popitem(False)
+ self.cache[args] = value
return value
- def __setitem__(self, key, value):
- if key in self:
- self.move_to_end(key)
- super().__setitem__(key, value)
- if len(self) > self.maxsize:
- oldest = next(iter(self))
- del self[oldest]
+.. doctest::
+ :hide:
+
+ >>> def square(x):
+ ... return x ** 2
+ ...
+ >>> s = LRU(square, maxsize=5)
+ >>> actual = [(s(x), s(x)) for x in range(20)]
+ >>> expected = [(x**2, x**2) for x in range(20)]
+ >>> actual == expected
+ True
+ >>> actual = list(s.cache.items())
+ >>> expected = [((x,), x**2) for x in range(15, 20)]
+ >>> actual == expected
+ True
:class:`UserDict` objects
regular dictionary, which is accessible via the :attr:`data` attribute of
:class:`UserDict` instances. If *initialdata* is provided, :attr:`data` is
initialized with its contents; note that a reference to *initialdata* will not
- be kept, allowing it be used for other purposes.
+ be kept, allowing it to be used for other purposes.
In addition to supporting the methods and operations of mappings,
:class:`UserDict` instances provide the following attribute:
If the future is cancelled before completing then :exc:`.CancelledError`
will be raised.
- If the call raised, this method will raise the same exception.
+ If the call raised an exception, this method will raise the same exception.
.. method:: exception(timeout=None)
import configparser
+.. testcleanup::
+
+ import os
+ os.remove("example.ini")
+
Quick Start
-----------
from contextlib import closing
from urllib.request import urlopen
- with closing(urlopen('http://www.python.org')) as page:
+ with closing(urlopen('https://www.python.org')) as page:
for line in page:
print(line)
:class:`Dialect` class or one of the strings returned by the
:func:`list_dialects` function. The other optional *fmtparams* keyword arguments
can be given to override individual formatting parameters in the current
- dialect. For full details about the dialect and formatting parameters, see
- section :ref:`csv-fmt-params`. To make it
+ dialect. For full details about dialects and formatting parameters, see
+ the :ref:`csv-fmt-params` section. To make it
as easy as possible to interface with modules which implement the DB API, the
value :const:`None` is written as the empty string. While this isn't a
reversible transformation, it makes it easier to dump SQL NULL data values to
Associate *dialect* with *name*. *name* must be a string. The
dialect can be specified either by passing a sub-class of :class:`Dialect`, or
by *fmtparams* keyword arguments, or both, with keyword arguments overriding
- parameters of the dialect. For full details about the dialect and formatting
+ parameters of the dialect. For full details about dialects and formatting
parameters, see section :ref:`csv-fmt-params`.
.. class:: Dialect
- The :class:`Dialect` class is a container class relied on primarily for its
- attributes, which are used to define the parameters for a specific
- :class:`reader` or :class:`writer` instance.
+ The :class:`Dialect` class is a container class whose attributes contain
+ information for how to handle doublequotes, whitespace, delimiters, etc.
+ Due to the lack of a strict CSV specification, different applications
+ produce subtly different CSV data. :class:`Dialect` instances define how
+ :class:`reader` and :class:`writer` instances behave.
+
+ All available :class:`Dialect` names are returned by :func:`list_dialects`,
+ and they can be registered with specific :class:`reader` and :class:`writer`
+ classes through their initializer (``__init__``) functions like this::
+
+ import csv
+
+ with open('students.csv', 'w', newline='') as csvfile:
+ writer = csv.writer(csvfile, dialect='unix')
+ ^^^^^^^^^^^^^^
.. class:: excel()
Return the next row of the reader's iterable object as a list (if the object
was returned from :func:`reader`) or a dict (if it is a :class:`DictReader`
- instance), parsed according to the current dialect. Usually you should call
- this as ``next(reader)``.
+ instance), parsed according to the current :class:`Dialect`. Usually you
+ should call this as ``next(reader)``.
Reader objects have the following public attributes:
.. method:: csvwriter.writerow(row)
- Write the *row* parameter to the writer's file object, formatted according to
- the current dialect. Return the return value of the call to the *write* method
- of the underlying file object.
+ Write the *row* parameter to the writer's file object, formatted according
+ to the current :class:`Dialect`. Return the return value of the call to the
+ *write* method of the underlying file object.
.. versionchanged:: 3.5
Added support of arbitrary iterables.
This opcode performs several operations before a with block starts. First,
it loads :meth:`~object.__exit__` from the context manager and pushes it onto
- the stack for later use by :opcode:`WITH_CLEANUP_START`. Then,
+ the stack for later use by :opcode:`WITH_EXCEPT_START`. Then,
:meth:`~object.__enter__` is called, and a finally block pointing to *delta*
is pushed. Finally, the result of calling the ``__enter__()`` method is pushed onto
the stack. The next opcode will either ignore it (:opcode:`POP_TOP`), or
.. exception:: NotADirectoryError
- Raised when a directory operation (such as :func:`os.listdir`) is requested
- on something which is not a directory.
+ Raised when a directory operation (such as :func:`os.listdir`) is requested on
+ something which is not a directory. On most POSIX platforms, it may also be
+ raised if an operation attempts to open or traverse a non-directory file as if
+ it were a directory.
Corresponds to :c:data:`errno` ``ENOTDIR``.
.. exception:: PermissionError
Compare the files named *f1* and *f2*, returning ``True`` if they seem equal,
``False`` otherwise.
- If *shallow* is true, files with identical :func:`os.stat` signatures are
- taken to be equal. Otherwise, the contents of the files are compared.
+ If *shallow* is true and the :func:`os.stat` signatures (file type, size, and
+ modification time) of both files are identical, the files are taken to be
+ equal.
+
+ Otherwise, the files are treated as different if their sizes or contents differ.
Note that no external programs are called from this function, giving it
portability and efficiency.
* ``'replace'`` causes a replacement marker (such as ``'?'``) to be inserted
where there is malformed data.
- * ``'surrogateescape'`` will represent any incorrect bytes as code
- points in the Unicode Private Use Area ranging from U+DC80 to
- U+DCFF. These private code points will then be turned back into
+ * ``'surrogateescape'`` will represent any incorrect bytes as low
+ surrogate code units ranging from U+DC80 to U+DCFF.
+ These surrogate code units will then be turned back into
the same bytes when the ``surrogateescape`` error handler is used
when writing data. This is useful for processing files in an
unknown encoding.
@lru_cache(maxsize=32)
def get_pep(num):
'Retrieve text of a Python Enhancement Proposal'
- resource = 'http://www.python.org/dev/peps/pep-%04d/' % num
+ resource = 'https://www.python.org/dev/peps/pep-%04d/' % num
try:
with urllib.request.urlopen(resource) as s:
return s.read()
.. method:: static_order()
- Returns an iterable of nodes in a topological order. Using this method
- does not require to call :meth:`TopologicalSorter.prepare` or
- :meth:`TopologicalSorter.done`. This method is equivalent to::
+ Returns an iterator object which will iterate over nodes in a topological
+ order. When using this method, :meth:`~TopologicalSorter.prepare` and
+ :meth:`~TopologicalSorter.done` should not be called. This method is
+ equivalent to::
def static_order(self):
self.prepare()
The detected cycle can be accessed via the second element in the :attr:`~CycleError.args`
attribute of the exception instance and consists in a list of nodes, such that each node is,
in the graph, an immediate predecessor of the next node in the list. In the reported list,
- the first and the last node will be the same, to make it clear that it is cyclic.
\ No newline at end of file
+ the first and the last node will be the same, to make it clear that it is cyclic.
.. versionadded:: 3.6
:func:`blake2b` and :func:`blake2s` were added.
+.. _hashlib-usedforsecurity:
+
.. versionchanged:: 3.9
All hashlib constructors take a keyword-only argument *usedforsecurity*
with default value ``True``. A false value allows the use of insecure and
request header it responds back with a ``100 Continue`` followed by ``200
OK`` headers.
This method can be overridden to raise an error if the server does not
- want the client to continue. For e.g. server can chose to send ``417
+ want the client to continue. For e.g. server can choose to send ``417
Expectation Failed`` as a response header and ``return False``.
.. versionadded:: 3.2
after adding imports at the top of a file. This also increases
possible attribute completions.
-Completion boxes intially exclude names beginning with '_' or, for
+Completion boxes initially exclude names beginning with '_' or, for
modules, not included in '__all__'. The hidden names can be accessed
by typing '_' after '.', either before or after the box is opened.
You can also get the full set of files contained within a distribution. The
``files()`` function takes a distribution package name and returns all of the
files installed by this distribution. Each file object returned is a
-``PackagePath``, a :class:`pathlib.Path` derived object with additional ``dist``,
+``PackagePath``, a :class:`pathlib.PurePath` derived object with additional ``dist``,
``size``, and ``hash`` properties as indicated by the metadata. For example::
>>> util = [p for p in files('wheel') if 'util.py' in str(p)][0] # doctest: +SKIP
return s.encode('utf-8')
return s
+You can also use the ``locate`` method to get the absolute path to the
+file::
+
+ >>> util.locate() # doctest: +SKIP
+ PosixPath('/home/gustav/example/lib/site-packages/wheel/util.py')
+
In the case where the metadata file listing files
(RECORD or SOURCES.txt) is missing, ``files()`` will
return ``None``. The caller may wish to wrap calls to
.. method:: is_package(fullname)
- An abstract method to return a true value if the module is a package, a
+ An optional method to return a true value if the module is a package, a
false value otherwise. :exc:`ImportError` is raised if the
:term:`loader` cannot find the module.
unix.rst
superseded.rst
undoc.rst
+ security_warnings.rst
to control the number of lines read: no more lines will be read if the
total size (in bytes/characters) of all lines so far exceeds *hint*.
+ *hint* values of ``0`` or less, as well as ``None``, are treated as no
+ hint.
+
Note that it's already possible to iterate on file objects using ``for
line in file: ...`` without calling ``file.readlines()``.
The above change was also included in Python 3.9 starting with
version 3.9.5.
+ .. versionchanged:: 3.8.12
+
+ The above change was also included in Python 3.8 starting with
+ version 3.8.12.
+
.. attribute:: version
The appropriate version number: ``4`` for IPv4, ``6`` for IPv6.
send it to the socket as a sequence of bytes preceded by a four-byte length
string packed in binary using ``struct.pack('>L', n)``.
+ .. _logging-eval-security:
+
.. note::
Because portions of the configuration are passed through
:meth:`~Logger.setLevel` and :meth:`~Logger.hasHandlers` methods were added
to :class:`LoggerAdapter`. These methods delegate to the underlying logger.
+.. versionchanged:: 3.6
+ Attribute :attr:`manager` and method :meth:`_log` were added, which
+ delegate to the underlying logger and allow adapters to be nested.
+
Thread Safety
-------------
The *version* argument indicates the data format that ``dump`` should use
(see below).
+ .. audit-event:: marshal.dumps value,version marshal.dump
+
.. function:: load(file)
format), raise :exc:`EOFError`, :exc:`ValueError` or :exc:`TypeError`. The
file must be a readable :term:`binary file`.
+ .. audit-event:: marshal.load "" marshal.load
+
.. note::
If an object containing an unsupported type was marshalled with :func:`dump`,
:func:`load` will substitute ``None`` for the unmarshallable type.
+ .. versionchanged:: 3.9.7
+
+ This call used to raise a ``code.__new__`` audit event for each code object. Now
+ it raises a single ``marshal.load`` event for the entire load operation.
+
.. function:: dumps(value[, version])
The *version* argument indicates the data format that ``dumps`` should use
(see below).
+ .. audit-event:: marshal.dumps value,version marshal.dump
+
.. function:: loads(bytes)
:exc:`EOFError`, :exc:`ValueError` or :exc:`TypeError`. Extra bytes in the
input are ignored.
+ .. audit-event:: marshal.loads bytes marshal.load
+
+ .. versionchanged:: 3.9.7
+
+ This call used to raise a ``code.__new__`` audit event for each code object. Now
+ it raises a single ``marshal.loads`` event for the entire load operation.
+
In addition, the following constants are defined:
>>> arr2
array('i', [0, 1, 2, 3, 4, 0, 0, 0, 0, 0])
+.. _multiprocessing-recv-pickle-security:
.. warning::
that contains symbolic links. On Windows, it converts forward slashes to
backward slashes. To normalize case, use :func:`normcase`.
+ .. note::
+ On POSIX systems, in accordance with `IEEE Std 1003.1 2013 Edition; 4.13
+ Pathname Resolution <http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap04.html#tag_04_13>`_,
+ if a pathname begins with exactly two slashes, the first component
+ following the leading slashes may be interpreted in an implementation-defined
+ manner, although more than two leading slashes shall be treated as a
+ single slash.
+
.. versionchanged:: 3.6
Accepts a :term:`path-like object`.
On Windows, splits a pathname into drive/UNC sharepoint and relative path.
If the path contains a drive letter, drive will contain everything
- up to and including the colon.
- e.g. ``splitdrive("c:/dir")`` returns ``("c:", "/dir")``
+ up to and including the colon::
+
+ >>> splitdrive("c:/dir")
+ ("c:", "/dir")
If the path contains a UNC path, drive will contain the host name
- and share, up to but not including the fourth separator.
- e.g. ``splitdrive("//host/computer/dir")`` returns ``("//host/computer", "/dir")``
+ and share, up to but not including the fourth separator::
+
+ >>> splitdrive("//host/computer/dir")
+ ("//host/computer", "/dir")
.. versionchanged:: 3.6
Accepts a :term:`path-like object`.
.. function:: splitext(path)
Split the pathname *path* into a pair ``(root, ext)`` such that ``root + ext ==
- path``, and *ext* is empty or begins with a period and contains at most one
- period. Leading periods on the basename are ignored; ``splitext('.cshrc')``
- returns ``('.cshrc', '')``.
+ path``, and the extension, *ext*, is empty or begins with a period and contains at
+ most one period.
+
+ If the path contains no extension, *ext* will be ``''``::
+
+ >>> splitext('bar')
+ ('bar', '')
+
+ If the path contains an extension, then *ext* will be set to this extension,
+ including the leading period. Note that previous periods will be ignored::
+
+ >>> splitext('foo.bar.exe')
+ ('foo.bar', '.exe')
+
+ Leading periods on the basename are ignored::
+
+ >>> splitext('.cshrc')
+ ('.cshrc', '')
.. versionchanged:: 3.6
Accepts a :term:`path-like object`.
.. note::
- On some platforms, including FreeBSD and Mac OS X, setting ``environ`` may
+ On some platforms, including FreeBSD and macOS, setting ``environ`` may
cause memory leaks. Refer to the system documentation for
:c:func:`putenv`.
.. note::
- On Mac OS X, :func:`getgroups` behavior differs somewhat from
+ On macOS, :func:`getgroups` behavior differs somewhat from
other Unix platforms. If the Python interpreter was built with a
deployment target of :const:`10.5` or earlier, :func:`getgroups` returns
the list of effective group ids associated with the current user process;
.. note::
- On some platforms, including FreeBSD and Mac OS X, setting ``environ`` may
+ On some platforms, including FreeBSD and macOS, setting ``environ`` may
cause memory leaks. Refer to the system documentation for :c:func:`putenv`.
.. audit-event:: os.putenv key,value os.putenv
.. availability:: Unix.
- .. note:: On Mac OS X, the length of *groups* may not exceed the
+ .. note:: On macOS, the length of *groups* may not exceed the
system-defined maximum number of effective group ids, typically 16.
See the documentation for :func:`getgroups` for cases where it may not
return the same group list set by calling setgroups().
On Linux, if *offset* is given as ``None``, the bytes are read from the
current position of *in_fd* and the position of *in_fd* is updated.
- The second case may be used on Mac OS X and FreeBSD where *headers* and
+ The second case may be used on macOS and FreeBSD where *headers* and
*trailers* are arbitrary sequences of buffers that are written before and
after the data from *in_fd* is written. It returns the same as the first case.
- On Mac OS X and FreeBSD, a value of ``0`` for *count* specifies to send until
+ On macOS and FreeBSD, a value of ``0`` for *count* specifies to send until
the end of *in_fd* is reached.
All platforms support sockets as *out_fd* file descriptor, and some platforms
+++ /dev/null
-.. _other-gui-packages:
-
-Other Graphical User Interface Packages
-=======================================
-
-Major cross-platform (Windows, Mac OS X, Unix-like) GUI toolkits are
-available for Python:
-
-.. seealso::
-
- `PyGObject <https://wiki.gnome.org/Projects/PyGObject>`_
- PyGObject provides introspection bindings for C libraries using
- `GObject <https://developer.gnome.org/gobject/stable/>`_. One of
- these libraries is the `GTK+ 3 <https://www.gtk.org/>`_ widget set.
- GTK+ comes with many more widgets than Tkinter provides. An online
- `Python GTK+ 3 Tutorial <https://python-gtk-3-tutorial.readthedocs.io/>`_
- is available.
-
- `PyGTK <http://www.pygtk.org/>`_
- PyGTK provides bindings for an older version
- of the library, GTK+ 2. It provides an object oriented interface that
- is slightly higher level than the C one. There are also bindings to
- `GNOME <https://www.gnome.org/>`_. An online `tutorial
- <http://www.pygtk.org/pygtk2tutorial/index.html>`_ is available.
-
- `PyQt <https://riverbankcomputing.com/software/pyqt/intro>`_
- PyQt is a :program:`sip`\ -wrapped binding to the Qt toolkit. Qt is an
- extensive C++ GUI application development framework that is
- available for Unix, Windows and Mac OS X. :program:`sip` is a tool
- for generating bindings for C++ libraries as Python classes, and
- is specifically designed for Python.
-
- `PySide2 <https://doc.qt.io/qtforpython/>`_
- Also known as the Qt for Python project, PySide2 is a newer binding to the
- Qt toolkit. It is provided by The Qt Company and aims to provide a
- complete port of PySide to Qt 5. Compared to PyQt, its licensing scheme is
- friendlier to non-open source applications.
-
- `wxPython <https://www.wxpython.org>`_
- wxPython is a cross-platform GUI toolkit for Python that is built around
- the popular `wxWidgets <https://www.wxwidgets.org/>`_ (formerly wxWindows)
- C++ toolkit. It provides a native look and feel for applications on
- Windows, Mac OS X, and Unix systems by using each platform's native
- widgets where ever possible, (GTK+ on Unix-like systems). In addition to
- an extensive set of widgets, wxPython provides classes for online
- documentation and context sensitive help, printing, HTML viewing,
- low-level device context drawing, drag and drop, system clipboard access,
- an XML-based resource format and more, including an ever growing library
- of user-contributed modules.
-
-PyGTK, PyQt, PySide2, and wxPython, all have a modern look and feel and more
-widgets than Tkinter. In addition, there are many other GUI toolkits for
-Python, both cross-platform, and platform-specific. See the `GUI Programming
-<https://wiki.python.org/moin/GuiProgramming>`_ page in the Python Wiki for a
-much more complete list, and also for links to documents where the
-different GUI toolkits are compared.
-
.. note::
- On Mac OS X (and perhaps other platforms), executable files may be
+ On macOS (and perhaps other platforms), executable files may be
universal files containing multiple architectures.
To get at the "64-bitness" of the current interpreter, it is more
.. function:: findall(pattern, string, flags=0)
Return all non-overlapping matches of *pattern* in *string*, as a list of
- strings. The *string* is scanned left-to-right, and matches are returned in
- the order found. If one or more groups are present in the pattern, return a
- list of groups; this will be a list of tuples if the pattern has more than
- one group. Empty matches are included in the result.
+ strings or tuples. The *string* is scanned left-to-right, and matches
+ are returned in the order found. Empty matches are included in the result.
+
+ The result depends on the number of capturing groups in the pattern.
+ If there are no groups, return a list of strings matching the whole
+ pattern. If there is exactly one group, return a list of strings
+ matching that group. If multiple groups are present, return a list
+ of tuples of strings matching the groups. Non-capturing groups do not
+ affect the form of the result.
+
+ >>> re.findall(r'\bf[a-z]*', 'which foot or hand fell fastest')
+ ['foot', 'fell', 'fastest']
+ >>> re.findall(r'(\w+)=(\d+)', 'set width=20 and height=10')
+ [('width', '20'), ('height', '10')]
.. versionchanged:: 3.7
Non-empty matches can now start just after a previous empty match.
This is useful if you want to match an arbitrary literal string that may
have regular expression metacharacters in it. For example::
- >>> print(re.escape('http://www.python.org'))
- http://www\.python\.org
+ >>> print(re.escape('https://www.python.org'))
+ https://www\.python\.org
>>> legal_chars = string.ascii_lowercase + string.digits + "!#$%&'*+-.^_`|~:"
>>> print('[%s]+' % re.escape(legal_chars))
--- /dev/null
+.. _security-warnings:
+
+.. index:: single: security considerations
+
+Security Considerations
+=======================
+
+The following modules have specific security considerations:
+
+* :mod:`cgi`: :ref:`CGI security considerations <cgi-security>`
+* :mod:`hashlib`: :ref:`all constructors take a "usedforsecurity" keyword-only
+ argument disabling known insecure and blocked algorithms
+ <hashlib-usedforsecurity>`
+* :mod:`http.server` is not suitable for production use, only implementing
+ basic security checks
+* :mod:`logging`: :ref:`Logging configuration uses eval()
+ <logging-eval-security>`
+* :mod:`multiprocessing`: :ref:`Connection.recv() uses pickle
+ <multiprocessing-recv-pickle-security>`
+* :mod:`pickle`: :ref:`Restricting globals in pickle <pickle-restrict>`
+* :mod:`random` shouldn't be used for security purposes, use :mod:`secrets`
+ instead
+* :mod:`shelve`: :ref:`shelve is based on pickle and thus unsuitable for
+ dealing with untrusted sources <shelve-security>`
+* :mod:`ssl`: :ref:`SSL/TLS security considerations <ssl-security>`
+* :mod:`subprocess`: :ref:`Subprocess security considerations
+ <subprocess-security>`
+* :mod:`tempfile`: :ref:`mktemp is deprecated due to vulnerability to race
+ conditions <tempfile-mktemp-deprecated>`
+* :mod:`xml`: :ref:`XML vulnerabilities <xml-vulnerabilities>`
+* :mod:`zipfile`: :ref:`maliciously prepared .zip files can cause disk volume
+ exhaustion <zipfile-resources-limitations>`
with shelve.open('spam') as db:
db['eggs'] = 'eggs'
+.. _shelve-security:
+
.. warning::
Because the :mod:`shelve` module is backed by :mod:`pickle`, it is insecure
to load a shelf from an untrusted source. Like with pickle, loading a shelf
can execute arbitrary code.
-Shelf objects support all methods supported by dictionaries. This eases the
+Shelf objects support most of the methods and operations supported by dictionaries
+(except copying, constructors and operators ``|`` and ``|=``). This eases the
transition from dictionary based scripts to those requiring persistent storage.
Two additional methods are supported:
.. audit-event:: shutil.make_archive base_name,format,root_dir,base_dir shutil.make_archive
+ .. note::
+
+ This function is not thread-safe.
+
.. versionchanged:: 3.8
The modern pax (POSIX.1-2001) format is now used instead of
the legacy GNU format for archives created with ``format="tar"``.
it is interpreted as the local host. To find the fully qualified name, the
hostname returned by :func:`gethostbyaddr` is checked, followed by aliases for the
host, if available. The first name which includes a period is selected. In
- case no fully qualified domain name is available, the hostname as returned by
- :func:`gethostname` is returned.
+ case no fully qualified domain name is available and *name* was provided,
+ it is returned unchanged. If *name* was empty or equal to ``'0.0.0.0'``,
+ the hostname from :func:`gethostname` is returned.
.. function:: gethostbyname(hostname)
does not include the type, i. e. if you use something like
``'as "Expiration date [datetime]"'`` in your SQL, then we will parse out
everything until the first ``'['`` for the column name and strip
- the preceeding space: the column name would simply be "Expiration date".
+ the preceding space: the column name would simply be "Expiration date".
.. function:: connect(database[, timeout, detect_types, isolation_level, check_same_thread, factory, cached_statements, uri])
This is a nonstandard convenience method for executing multiple SQL statements
at once. It issues a ``COMMIT`` statement first, then executes the SQL script it
gets as a parameter. This method disregards :attr:`isolation_level`; any
- transation control must be added to *sql_script*.
+ transaction control must be added to *sql_script*.
*sql_script* can be an instance of :class:`str`.
| | index given by *i* | |
| | (same as ``s[i:i] = [x]``) | |
+------------------------------+--------------------------------+---------------------+
-| ``s.pop([i])`` | retrieves the item at *i* and | \(2) |
+| ``s.pop()`` or ``s.pop(i)`` | retrieves the item at *i* and | \(2) |
| | also removes it from *s* | |
+------------------------------+--------------------------------+---------------------+
| ``s.remove(x)`` | remove the first item from *s* | \(3) |
The most common exception raised is :exc:`OSError`. This occurs, for example,
when trying to execute a non-existent file. Applications should prepare for
-:exc:`OSError` exceptions.
+:exc:`OSError` exceptions. Note that, when ``shell=True``, :exc:`OSError`
+will be raised by the child only if the selected shell itself was not found.
+To determine if the shell failed to find the requested application, it is
+necessary to check the return code or output from the subprocess.
A :exc:`ValueError` will be raised if :class:`Popen` is called with invalid
arguments.
.. versionadded:: 3.3
The :exc:`SubprocessError` base class was added.
+.. _subprocess-security:
Security Considerations
-----------------------
If *expand* is set to ``False``, the path will not be expanded using the
variables.
- If *name* is not found, return ``None``.
+ If *name* is not found, raise a :exc:`KeyError`.
.. function:: get_paths([scheme, [vars, [expand]]])
``'x:bz2'``, :func:`tarfile.open` accepts the keyword argument
*compresslevel* (default ``9``) to specify the compression level of the file.
+ For modes ``'w:xz'`` and ``'x:xz'``, :func:`tarfile.open` accepts the
+ keyword argument *preset* to specify the compression level of the file.
+
For special purposes, there is a second format for *mode*:
``'filemode|[compression]'``. :func:`tarfile.open` will return a :class:`TarFile`
object that processes its data as a stream of blocks. No random seeking will
>>>
# directory and contents have been removed
+.. _tempfile-mktemp-deprecated:
Deprecated functions and variables
----------------------------------
functions should be good enough; otherwise, you should use an instance of
:class:`TextWrapper` for efficiency.
-.. function:: wrap(text, width=70, **kwargs)
+.. function:: wrap(text, width=70, *, initial_indent="", \
+ subsequent_indent="", expand_tabs=True, \
+ replace_whitespace=True, fix_sentence_endings=False, \
+ break_long_words=True, drop_whitespace=True, \
+ break_on_hyphens=True, tabsize=8, max_lines=None)
Wraps the single paragraph in *text* (a string) so every line is at most
*width* characters long. Returns a list of output lines, without final
newlines.
Optional keyword arguments correspond to the instance attributes of
- :class:`TextWrapper`, documented below. *width* defaults to ``70``.
+ :class:`TextWrapper`, documented below.
See the :meth:`TextWrapper.wrap` method for additional details on how
:func:`wrap` behaves.
-.. function:: fill(text, width=70, **kwargs)
+.. function:: fill(text, width=70, *, initial_indent="", \
+ subsequent_indent="", expand_tabs=True, \
+ replace_whitespace=True, fix_sentence_endings=False, \
+ break_long_words=True, drop_whitespace=True, \
+ break_on_hyphens=True, tabsize=8, \
+ max_lines=None)
Wraps the single paragraph in *text*, and returns a single string containing the
wrapped paragraph. :func:`fill` is shorthand for ::
:func:`wrap`.
-.. function:: shorten(text, width, **kwargs)
+.. function:: shorten(text, width, *, fix_sentence_endings=False, \
+ break_long_words=True, break_on_hyphens=True, \
+ placeholder=' [...]')
Collapse and truncate the given *text* to fit in the given *width*.
.. versionadded:: 3.4
-
.. function:: dedent(text)
Remove any common leading whitespace from every line in *text*.
use :mod:`tkinter`, you don't need to write Tcl code, but you will need to
consult the Tk documentation, and occasionally the Tcl documentation.
:mod:`tkinter` is a set of wrappers that implement the Tk widgets as Python
-classes. In addition, the internal module :mod:`_tkinter` provides a threadsafe
-mechanism which allows Python and Tcl to interact.
+classes.
:mod:`tkinter`'s chief virtues are that it is fast, and that it usually comes
bundled with Python. Although its standard documentation is weak, good
material is available, which includes: references, tutorials, a book and
others. :mod:`tkinter` is also famous for having an outdated look and feel,
which has been vastly improved in Tk 8.5. Nevertheless, there are many other
-GUI libraries that you could be interested in. For more information about
-alternatives, see the :ref:`other-gui-packages` section.
+GUI libraries that you could be interested in. The Python wiki lists several
+alternative `GUI frameworks and tools <https://wiki.python.org/moin/GuiProgramming>`_.
.. toctree::
tkinter.ttk.rst
tkinter.tix.rst
idle.rst
- othergui.rst
.. Other sections I have in mind are
Tkinter internals
--------------
The :mod:`tkinter` package ("Tk interface") is the standard Python interface to
-the Tk GUI toolkit. Both Tk and :mod:`tkinter` are available on most Unix
-platforms, as well as on Windows systems. (Tk itself is not part of Python; it
-is maintained at ActiveState.)
+the Tcl/Tk GUI toolkit. Both Tk and :mod:`tkinter` are available on most Unix
+platforms, including macOS, as well as on Windows systems.
Running ``python -m tkinter`` from the command line should open a window
demonstrating a simple Tk interface, letting you know that :mod:`tkinter` is
.. seealso::
- Tkinter documentation:
+ * `TkDocs <https://tkdocs.com/>`_
+ Extensive tutorial on creating user interfaces with Tkinter. Explains key concepts,
+ and illustrates recommended approaches using the modern API.
- `Python Tkinter Resources <https://wiki.python.org/moin/TkInter>`_
- The Python Tkinter Topic Guide provides a great deal of information on using Tk
- from Python and links to other sources of information on Tk.
+ * `Tkinter 8.5 reference: a GUI for Python <https://www.tkdocs.com/shipman/>`_
+ Reference documentation for Tkinter 8.5 detailing available classes, methods, and options.
- `TKDocs <http://www.tkdocs.com/>`_
- Extensive tutorial plus friendlier widget pages for some of the widgets.
+ Tcl/Tk Resources:
- `Tkinter 8.5 reference: a GUI for Python <https://www.tkdocs.com/shipman/>`_
- On-line reference material.
+ * `Tk commands <https://www.tcl.tk/man/tcl8.6/TkCmd/contents.htm>`_
+ Comprehensive reference to each of the underlying Tcl/Tk commands used by Tkinter.
- `Tkinter docs from effbot <http://effbot.org/tkinterbook/>`_
- Online reference for tkinter supported by effbot.org.
+ * `Tcl/Tk Home Page <https://www.tcl.tk>`_
+ Additional documentation, and links to Tcl/Tk core development.
- `Programming Python <http://learning-python.com/about-pp4e.html>`_
- Book by Mark Lutz, has excellent coverage of Tkinter.
+ Books:
- `Modern Tkinter for Busy Python Developers <https://www.amazon.com/Modern-Tkinter-Python-Developers-ebook/dp/B0071QDNLO/>`_
- Book by Mark Roseman about building attractive and modern graphical user interfaces with Python and Tkinter.
+ * `Modern Tkinter for Busy Python Developers <https://tkdocs.com/book.html>`_
+ By Mark Roseman. (ISBN 978-1999149567)
- `Python and Tkinter Programming <https://www.manning.com/books/python-and-tkinter-programming>`_
- Book by John Grayson (ISBN 1-884777-81-3).
+ * `Python and Tkinter Programming <https://www.packtpub.com/product/python-gui-programming-with-tkinter/9781788835886>`_
+ By Alan Moore. (ISBN 978-1788835886)
- Tcl/Tk documentation:
+ * `Programming Python <http://learning-python.com/about-pp4e.html>`_
+ By Mark Lutz; has excellent coverage of Tkinter. (ISBN 978-0596158101)
- `Tk commands <https://www.tcl.tk/man/tcl8.6/TkCmd/contents.htm>`_
- Most commands are available as :mod:`tkinter` or :mod:`tkinter.ttk` classes.
- Change '8.6' to match the version of your Tcl/Tk installation.
-
- `Tcl/Tk recent man pages <https://www.tcl.tk/doc/>`_
- Recent Tcl/Tk manuals on www.tcl.tk.
-
- `ActiveState Tcl Home Page <https://tcl.tk>`_
- The Tk/Tcl development is largely taking place at ActiveState.
-
- `Tcl and the Tk Toolkit <https://www.amazon.com/exec/obidos/ASIN/020163337X>`_
- Book by John Ousterhout, the inventor of Tcl.
-
- `Practical Programming in Tcl and Tk <http://www.beedub.com/book/>`_
- Brent Welch's encyclopedic book.
+ * `Tcl and the Tk Toolkit (2nd edition) <https://www.amazon.com/exec/obidos/ASIN/032133633X>`_
+ By John Ousterhout, inventor of Tcl/Tk, and Ken Jones; does not cover Tkinter. (ISBN 978-0321336330)
Tkinter Modules
---------------
-Most of the time, :mod:`tkinter` is all you really need, but a number of
-additional modules are available as well. The Tk interface is located in a
-binary module named :mod:`_tkinter`. This module contains the low-level
-interface to Tk, and should never be used directly by application programmers.
-It is usually a shared library (or DLL), but might in some cases be statically
-linked with the Python interpreter.
-
-In addition to the Tk interface module, :mod:`tkinter` includes a number of
-Python modules, :mod:`tkinter.constants` being one of the most important.
-Importing :mod:`tkinter` will automatically import :mod:`tkinter.constants`,
-so, usually, to use Tkinter all you need is a simple import statement::
-
- import tkinter
+Support for Tkinter is spread across several modules. Most applications will need the
+main :mod:`tkinter` module, as well as the :mod:`tkinter.ttk` module, which provides
+the modern themed widget set and API::
-Or, more often::
from tkinter import *
+ from tkinter import ttk
.. class:: Tk(screenName=None, baseName=None, className='Tk', useTk=1)
subsystem initialized) by calling its :meth:`loadtk` method.
-Other modules that provide Tk support include:
+The modules that provide Tk support include:
+
+:mod:`tkinter`
+ Main Tkinter module.
:mod:`tkinter.colorchooser`
Dialog to let the user choose a color.
:mod:`tkinter.simpledialog`
Basic dialogs and convenience functions.
+:mod:`tkinter.ttk`
+ Themed widget set introduced in Tk 8.5, providing modern alternatives
+ for many of the classic widgets in the main :mod:`tkinter` module.
+
+Additional modules:
+
+:mod:`_tkinter`
+ A binary module that contains the low-level interface to Tcl/Tk.
+ It is automatically imported by the main :mod:`tkinter` module,
+ and should never be used directly by application programmers.
+ It is usually a shared library (or DLL), but might in some cases be
+ statically linked with the Python interpreter.
+
+:mod:`idlelib`
+ Python's Integrated Development and Learning Environment (IDLE). Based
+ on :mod:`tkinter`.
+
+:mod:`tkinter.constants`
+ Symbolic constants that can be used in place of strings when passing
+ various parameters to Tkinter calls. Automatically imported by the
+ main :mod:`tkinter` module.
+
:mod:`tkinter.dnd`
- Drag-and-drop support for :mod:`tkinter`. This is experimental and should
- become deprecated when it is replaced with the Tk DND.
+ (experimental) Drag-and-drop support for :mod:`tkinter`. This will
+ become deprecated when it is replaced with the Tk DND.
+
+:mod:`tkinter.tix`
+ (deprecated) An older third-party Tcl/Tk package that adds several new
+ widgets. Better alternatives for most can be found in :mod:`tkinter.ttk`.
:mod:`turtle`
Turtle graphics in a Tk window.
Window control
| :func:`bgcolor`
| :func:`bgpic`
- | :func:`clear` | :func:`clearscreen`
- | :func:`reset` | :func:`resetscreen`
+ | :func:`clearscreen`
+ | :func:`resetscreen`
| :func:`screensize`
| :func:`setworldcoordinates`
~~~~~~~~~~~~~~~~~~~~
.. function:: reset()
- :noindex:
Delete the turtle's drawings from the screen, re-center the turtle and set
variables to the default values.
.. function:: clear()
- :noindex:
Delete the turtle's drawings from the screen. Do not move turtle. State and
position of the turtle as well as drawings of other turtles are not affected.
.. function:: clear()
- clearscreen()
-
- Delete all drawings and all turtles from the TurtleScreen. Reset the now
- empty TurtleScreen to its initial state: white background, no background
- image, no event bindings and tracing on.
+ :noindex:
.. note::
This TurtleScreen method is available as a global function only under the
derived from the Turtle method ``clear``.
-.. function:: reset()
- resetscreen()
+.. function:: clearscreen()
- Reset all Turtles on the Screen to their initial state.
+ Delete all drawings and all turtles from the TurtleScreen. Reset the now
+ empty TurtleScreen to its initial state: white background, no background
+ image, no event bindings and tracing on.
+
+
+.. function:: reset()
+ :noindex:
.. note::
This TurtleScreen method is available as a global function only under the
derived from the Turtle method ``reset``.
+.. function:: resetscreen()
+
+ Reset all Turtles on the Screen to their initial state.
+
+
.. function:: screensize(canvwidth=None, canvheight=None, bg=None)
:param canvwidth: positive integer, new width of canvas in pixels
>>> real.method.return_value = sentinel.some_object
>>> result = real.method()
>>> assert result is sentinel.some_object
- >>> sentinel.some_object
+ >>> result
sentinel.some_object
``-t`` opens the URL in a new browser page ("tab"). The options are,
naturally, mutually exclusive. Usage example::
- python -m webbrowser -t "http://www.python.org"
+ python -m webbrowser -t "https://www.python.org"
The following exception is defined:
Here are some simple examples::
- url = 'http://docs.python.org/'
+ url = 'https://docs.python.org/'
# Open URL in a new tab, if a browser window is already open.
webbrowser.open_new_tab(url)
For the :class:`Document` node, an additional keyword argument *encoding* can
be used to specify the encoding field of the XML header.
- Silimarly, explicitly stating the *standalone* argument causes the
+ Similarly, explicitly stating the *standalone* argument causes the
standalone document declarations to be added to the prologue of the XML
document.
If the value is set to `True`, `standalone="yes"` is added,
The following table gives an overview of the known attacks and whether
the various modules are vulnerable to them.
-========================= ============== =============== ============== ============== ==============
-kind sax etree minidom pulldom xmlrpc
-========================= ============== =============== ============== ============== ==============
-billion laughs **Vulnerable** **Vulnerable** **Vulnerable** **Vulnerable** **Vulnerable**
-quadratic blowup **Vulnerable** **Vulnerable** **Vulnerable** **Vulnerable** **Vulnerable**
-external entity expansion Safe (4) Safe (1) Safe (2) Safe (4) Safe (3)
-`DTD`_ retrieval Safe (4) Safe Safe Safe (4) Safe
-decompression bomb Safe Safe Safe Safe **Vulnerable**
-========================= ============== =============== ============== ============== ==============
-
-1. :mod:`xml.etree.ElementTree` doesn't expand external entities and raises a
+========================= ================== ================== ================== ================== ==================
+kind sax etree minidom pulldom xmlrpc
+========================= ================== ================== ================== ================== ==================
+billion laughs **Vulnerable** (1) **Vulnerable** (1) **Vulnerable** (1) **Vulnerable** (1) **Vulnerable** (1)
+quadratic blowup **Vulnerable** (1) **Vulnerable** (1) **Vulnerable** (1) **Vulnerable** (1) **Vulnerable** (1)
+external entity expansion Safe (5) Safe (2) Safe (3) Safe (5) Safe (4)
+`DTD`_ retrieval Safe (5) Safe Safe Safe (5) Safe
+decompression bomb Safe Safe Safe Safe **Vulnerable**
+========================= ================== ================== ================== ================== ==================
+
+1. Expat 2.4.1 and newer is not vulnerable to the "billion laughs" and
+   "quadratic blowup" vulnerabilities. Items are still listed as vulnerable
+   due to potential reliance on system-provided libraries. Check
+ :data:`pyexpat.EXPAT_VERSION`.
+2. :mod:`xml.etree.ElementTree` doesn't expand external entities and raises a
:exc:`ParserError` when an entity occurs.
-2. :mod:`xml.dom.minidom` doesn't expand external entities and simply returns
+3. :mod:`xml.dom.minidom` doesn't expand external entities and simply returns
the unexpanded entity verbatim.
-3. :mod:`xmlrpclib` doesn't expand external entities and omits them.
-4. Since Python 3.7.1, external general entities are no longer processed by
+4. :mod:`xmlrpclib` doesn't expand external entities and omits them.
+5. Since Python 3.7.1, external general entities are no longer processed by
default.
of the last modification to the file; the fields are described in section
:ref:`zipinfo-objects`.
-
.. function:: is_zipfile(filename)
Returns ``True`` if *filename* is a valid ZIP file based on its magic number,
If ``arcname`` (or ``filename``, if ``arcname`` is not given) contains a null
byte, the name of the file in the archive will be truncated at the null byte.
+ .. note::
+
+ A leading slash in the filename may lead to the archive being impossible to
+ open in some zip programs on Windows systems.
+
.. versionchanged:: 3.6
Calling :meth:`write` on a ZipFile created with mode ``'r'`` or
a closed ZipFile will raise a :exc:`ValueError`. Previously,
Such as allowable characters in the directory entries, length of the file name,
length of the pathname, size of a single file, and number of files, etc.
+.. _zipfile-resources-limitations:
+
Resources limitations
~~~~~~~~~~~~~~~~~~~~~
Raising a negative number to a fractional power results in a :class:`complex`
number. (In earlier versions it raised a :exc:`ValueError`.)
+This operation can be customized using the special :meth:`__pow__` method.
.. _unary:
single: operator; - (minus)
single: - (minus); unary operator
-The unary ``-`` (minus) operator yields the negation of its numeric argument.
+The unary ``-`` (minus) operator yields the negation of its numeric argument; the
+operation can be overridden with the :meth:`__neg__` special method.
.. index::
single: plus
single: operator; + (plus)
single: + (plus); unary operator
-The unary ``+`` (plus) operator yields its numeric argument unchanged.
+The unary ``+`` (plus) operator yields its numeric argument unchanged; the
+operation can be overridden with the :meth:`__pos__` special method.
.. index::
single: inversion
The unary ``~`` (invert) operator yields the bitwise inversion of its integer
argument. The bitwise inversion of ``x`` is defined as ``-(x+1)``. It only
-applies to integral numbers.
+applies to integral numbers or to custom objects that override the
+:meth:`__invert__` special method.
+
+
.. index:: exception: TypeError
common type and then multiplied together. In the latter case, sequence
repetition is performed; a negative repetition factor yields an empty sequence.
+This operation can be customized using the special :meth:`__mul__` and
+:meth:`__rmul__` methods.
+
.. index::
single: matrix multiplication
operator: @ (at)
applied to the result. Division by zero raises the :exc:`ZeroDivisionError`
exception.
+This operation can be customized using the special :meth:`__truediv__` and
+:meth:`__floordiv__` methods.
+
.. index::
single: modulo
operator: % (percent)
known as interpolation). The syntax for string formatting is described in the
Python Library Reference, section :ref:`old-string-formatting`.
+The *modulo* operation can be customized using the special :meth:`__mod__` method.
+
The floor division operator, the modulo operator, and the :func:`divmod`
function are not defined for complex numbers. Instead, convert to a floating
point number using the :func:`abs` function if appropriate.
former case, the numbers are converted to a common type and then added together.
In the latter case, the sequences are concatenated.
+This operation can be customized using the special :meth:`__add__` and
+:meth:`__radd__` methods.
+
.. index::
single: subtraction
single: operator; - (minus)
The ``-`` (subtraction) operator yields the difference of its arguments. The
numeric arguments are first converted to a common type.
+This operation can be customized using the special :meth:`__sub__` method.
+
.. _shifting:
These operators accept integers as arguments. They shift the first argument to
the left or right by the number of bits given by the second argument.
+This operation can be customized using the special :meth:`__lshift__` and
+:meth:`__rshift__` methods.
+
.. index:: exception: ValueError
A right shift by *n* bits is defined as floor division by ``pow(2,n)``. A left
operator: & (ampersand)
The ``&`` operator yields the bitwise AND of its arguments, which must be
-integers.
+integers or one of them must be a custom object overriding :meth:`__and__` or
+:meth:`__rand__` special methods.
.. index::
pair: bitwise; xor
operator: ^ (caret)
The ``^`` operator yields the bitwise XOR (exclusive OR) of its arguments, which
-must be integers.
+must be integers or one of them must be a custom object overriding :meth:`__xor__` or
+:meth:`__rxor__` special methods.
.. index::
pair: bitwise; or
operator: | (vertical bar)
The ``|`` operator yields the bitwise (inclusive) OR of its arguments, which
-must be integers.
+must be integers or one of them must be a custom object overriding :meth:`__or__` or
+:meth:`__ror__` special methods.
.. _comparisons:
comp_operator: "<" | ">" | "==" | ">=" | "<=" | "!="
: | "is" ["not"] | ["not"] "in"
-Comparisons yield boolean values: ``True`` or ``False``.
+Comparisons yield boolean values: ``True`` or ``False``. Custom
+:dfn:`rich comparison methods` may return non-boolean values. In this case
+Python will call :func:`bool` on such a value in boolean contexts.
.. index:: pair: chaining; comparisons
{% block body %}
<h1>{{ docstitle|e }}</h1>
<p>
- {% trans %}Welcome! This is the documentation for Python {{ release }}.{% endtrans %}
+ {% trans %}Welcome! This is the official documentation for Python {{ release }}.{% endtrans %}
</p>
<p><strong>{% trans %}Parts of the documentation:{% endtrans %}</strong></p>
<table class="contentstable" align="center"><tr>
>>> function(0, a=0)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
- TypeError: function() got multiple values for keyword argument 'a'
+ TypeError: function() got multiple values for argument 'a'
When a final formal parameter of the form ``**name`` is present, it receives a
dictionary (see :ref:`typesmapping`) containing all keyword arguments except for
>>> pos_only_arg(arg=1)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
- TypeError: pos_only_arg() got an unexpected keyword argument 'arg'
+ TypeError: pos_only_arg() got some positional-only arguments passed as keyword arguments: 'arg'
The third function ``kwd_only_args`` only allows keyword arguments as indicated
by a ``*`` in the function definition::
>>> combined_example(pos_only=1, standard=2, kwd_only=3)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
- TypeError: combined_example() got an unexpected keyword argument 'pos_only'
+ TypeError: combined_example() got some positional-only arguments passed as keyword arguments: 'pos_only'
Finally, consider this function definition which has a potential collision between the positional argument ``name`` and ``**kwds`` which has ``name`` as a key::
>>> '2' + 2
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
- TypeError: Can't convert 'int' object to str implicitly
+ TypeError: can only concatenate str (not "int") to str
The last line of the error message indicates what happened. Exceptions come in
different types, and the type is printed as part of the message: the types in
>>> word[2:5] # characters from position 2 (included) to 5 (excluded)
'tho'
-Note how the start is always included, and the end always excluded. This
-makes sure that ``s[:i] + s[i:]`` is always equal to ``s``::
-
- >>> word[:2] + word[2:]
- 'Python'
- >>> word[:4] + word[4:]
- 'Python'
-
Slice indices have useful defaults; an omitted first index defaults to zero, an
omitted second index defaults to the size of the string being sliced. ::
>>> word[-2:] # characters from the second-last (included) to the end
'on'
+Note how the start is always included, and the end always excluded. This
+makes sure that ``s[:i] + s[i:]`` is always equal to ``s``::
+
+ >>> word[:2] + word[2:]
+ 'Python'
+ >>> word[:4] + word[4:]
+ 'Python'
+
One way to remember how slices work is to think of the indices as pointing
*between* characters, with the left edge of the first character numbered 0.
Then the right edge of the last character of a string of *n* characters has
functions internally. For more details, please see their respective
documentation.
(Contributed by Adam Goldschmidt, Senthil Kumaran and Ken Jin in :issue:`42967`.)
+
+Notable changes in Python 3.8.12
+================================
+
+Starting with Python 3.8.12 the :mod:`ipaddress` module no longer accepts
+any leading zeros in IPv4 address strings. Leading zeros are ambiguous and
+interpreted as octal notation by some libraries. For example the legacy
+function :func:`socket.inet_aton` treats leading zeros as octal notation.
+The glibc implementation of the modern :func:`~socket.inet_pton` does not
+accept any leading zeros.
+
+(Originally contributed by Christian Heimes in :issue:`36384`, and backported
+to 3.8 by Achraf Merzouki.)
| '(' a=starred_expression ')' {
RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "can't use starred expression here") }
invalid_import_from_targets:
- | import_from_as_names ',' {
+ | import_from_as_names ',' NEWLINE {
RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") }
fills newly allocated memory with CLEANBYTE (0xCD) and newly freed memory
with DEADBYTE (0xDD). Detect also "untouchable bytes" marked
with FORBIDDENBYTE (0xFD). */
-static inline int _PyMem_IsPtrFreed(void *ptr)
+static inline int _PyMem_IsPtrFreed(const void *ptr)
{
uintptr_t value = (uintptr_t)ptr;
#if SIZEOF_VOID_P == 8
/*--start constants--*/
#define PY_MAJOR_VERSION 3
#define PY_MINOR_VERSION 9
-#define PY_MICRO_VERSION 6
+#define PY_MICRO_VERSION 7
#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL
#define PY_RELEASE_SERIAL 0
/* Version as a string */
-#define PY_VERSION "3.9.6"
+#define PY_VERSION "3.9.7"
/*--end constants--*/
/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
hx = hash(x)
h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
h &= MASK
+ h ^= (h >> 11) ^ (h >> 25)
h = h * 69069 + 907133923
h &= MASK
if h > MAX:
self.update(data)
def _commit_removals(self):
- l = self._pending_removals
+ pop = self._pending_removals.pop
discard = self.data.discard
- while l:
- discard(l.pop())
+ while True:
+ try:
+ item = pop()
+ except IndexError:
+ return
+ discard(item)
def __iter__(self):
with _IterationGuard(self):
return argument.metavar
elif argument.dest not in (None, SUPPRESS):
return argument.dest
+ elif argument.choices:
+ return '{' + ','.join(argument.choices) + '}'
else:
return None
_option_strings.append(option_string)
if help is not None and default is not None:
- help += f" (default: {default})"
+ help += " (default: %(default)s)"
super().__init__(
option_strings=_option_strings,
async def sleep(delay, result=None, *, loop=None):
"""Coroutine that completes after a given time (in seconds)."""
+ if loop is not None:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
+
if delay <= 0:
await __sleep0()
return result
if loop is None:
loop = events.get_running_loop()
- else:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
future = loop.create_future()
h = loop.call_later(delay,
after catching an exception (raised by one of the awaitables) from
gather won't cancel any other awaitables.
"""
+ if loop is not None:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
+
if not coros_or_futures:
if loop is None:
loop = events.get_event_loop()
- else:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
outer = loop.create_future()
outer.set_result([])
return outer
"""Asynchronously run function *func* in a separate thread.
Any *args and **kwargs supplied for this function are directly passed
- to *func*. Also, the current :class:`contextvars.Context` is propogated,
+ to *func*. Also, the current :class:`contextvars.Context` is propagated,
allowing context variables from the main thread to be accessed in the
separate thread.
if not force:
try:
mtime = int(os.stat(fullname).st_mtime)
- expect = struct.pack('<4sll', importlib.util.MAGIC_NUMBER,
- 0, mtime)
+ expect = struct.pack('<4sLL', importlib.util.MAGIC_NUMBER,
+ 0, mtime & 0xFFFF_FFFF)
for cfile in opt_cfiles.values():
with open(cfile, 'rb') as chandle:
actual = chandle.read(12)
else:
print('*** ', end='')
# escape non-printable characters in msg
- msg = err.msg.encode(sys.stdout.encoding,
- errors='backslashreplace')
- msg = msg.decode(sys.stdout.encoding)
+ encoding = sys.stdout.encoding or sys.getdefaultencoding()
+ msg = err.msg.encode(encoding, errors='backslashreplace').decode(encoding)
print(msg)
except (SyntaxError, UnicodeError, OSError) as e:
success = False
# for the class instead.
# See http://bugs.python.org/issue19404 for more details.
-
-class _GeneratorContextManager(_GeneratorContextManagerBase,
- AbstractContextManager,
- ContextDecorator):
- """Helper for @contextmanager decorator."""
-
def _recreate_cm(self):
- # _GCM instances are one-shot context managers, so the
+ # _GCMB instances are one-shot context managers, so the
# CM must be recreated each time a decorated function is
# called
return self.__class__(self.func, self.args, self.kwds)
+
+class _GeneratorContextManager(
+ _GeneratorContextManagerBase,
+ AbstractContextManager,
+ ContextDecorator,
+):
+ """Helper for @contextmanager decorator."""
+
def __enter__(self):
# do not keep args and kwds alive unnecessarily
# they are only needed for recreation, which is not possible anymore
except StopIteration:
raise RuntimeError("generator didn't yield") from None
- def __exit__(self, type, value, traceback):
- if type is None:
+ def __exit__(self, typ, value, traceback):
+ if typ is None:
try:
next(self.gen)
except StopIteration:
if value is None:
# Need to force instantiation so we can reliably
# tell if we get the same exception back
- value = type()
+ value = typ()
try:
- self.gen.throw(type, value, traceback)
+ self.gen.throw(typ, value, traceback)
except StopIteration as exc:
# Suppress StopIteration *unless* it's the same exception that
# was passed to throw(). This prevents a StopIteration
# Don't re-raise the passed in exception. (issue27122)
if exc is value:
return False
- # Likewise, avoid suppressing if a StopIteration exception
+ # Avoid suppressing if a StopIteration exception
# was passed to throw() and later wrapped into a RuntimeError
- # (see PEP 479).
- if type is StopIteration and exc.__cause__ is value:
+ # (see PEP 479 for sync generators; async generators also
+ # have this behavior). But do this only if the exception wrapped
+ # by the RuntimeError is actually Stop(Async)Iteration (see
+ # issue29692).
+ if (
+ isinstance(value, StopIteration)
+ and exc.__cause__ is value
+ ):
return False
raise
- except:
+ except BaseException as exc:
# only re-raise if it's *not* the exception that was
# passed to throw(), because __exit__() must not raise
# an exception unless __exit__() itself failed. But throw()
# has to raise the exception to signal propagation, so this
# fixes the impedance mismatch between the throw() protocol
# and the __exit__() protocol.
- #
- # This cannot use 'except BaseException as exc' (as in the
- # async implementation) to maintain compatibility with
- # Python 2, where old-style class exceptions are not caught
- # by 'except BaseException'.
- if sys.exc_info()[1] is value:
- return False
- raise
+ if exc is not value:
+ raise
+ return False
raise RuntimeError("generator didn't stop after throw()")
class _AsyncGeneratorContextManager(_GeneratorContextManagerBase,
AbstractAsyncContextManager):
- """Helper for @asynccontextmanager."""
+ """Helper for @asynccontextmanager decorator."""
async def __aenter__(self):
+ # do not keep args and kwds alive unnecessarily
+ # they are only needed for recreation, which is not possible anymore
+ del self.args, self.kwds, self.func
try:
return await self.gen.__anext__()
except StopAsyncIteration:
try:
await self.gen.__anext__()
except StopAsyncIteration:
- return
+ return False
else:
raise RuntimeError("generator didn't stop")
else:
if value is None:
+ # Need to force instantiation so we can reliably
+ # tell if we get the same exception back
value = typ()
- # See _GeneratorContextManager.__exit__ for comments on subtleties
- # in this implementation
try:
await self.gen.athrow(typ, value, traceback)
- raise RuntimeError("generator didn't stop after athrow()")
except StopAsyncIteration as exc:
+ # Suppress StopIteration *unless* it's the same exception that
+ # was passed to throw(). This prevents a StopIteration
+ # raised inside the "with" statement from being suppressed.
return exc is not value
except RuntimeError as exc:
+ # Don't re-raise the passed in exception. (issue27122)
if exc is value:
return False
- # Avoid suppressing if a StopIteration exception
- # was passed to throw() and later wrapped into a RuntimeError
+ # Avoid suppressing if a Stop(Async)Iteration exception
+ # was passed to athrow() and later wrapped into a RuntimeError
# (see PEP 479 for sync generators; async generators also
# have this behavior). But do this only if the exception wrapped
# by the RuntimeError is actully Stop(Async)Iteration (see
# issue29692).
- if isinstance(value, (StopIteration, StopAsyncIteration)):
- if exc.__cause__ is value:
- return False
+ if (
+ isinstance(value, (StopIteration, StopAsyncIteration))
+ and exc.__cause__ is value
+ ):
+ return False
raise
except BaseException as exc:
+ # only re-raise if it's *not* the exception that was
+ # passed to throw(), because __exit__() must not raise
+ # an exception unless __exit__() itself failed. But throw()
+ # has to raise the exception to signal propagation, so this
+ # fixes the impedance mismatch between the throw() protocol
+ # and the __exit__() protocol.
if exc is not value:
raise
+ return False
+ raise RuntimeError("generator didn't stop after athrow()")
def contextmanager(func):
(9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9))
def test_sf1651235(self):
- # see http://www.python.org/sf/1651235
+ # see https://www.python.org/sf/1651235
proto = CFUNCTYPE(c_int, RECT, POINT)
def callback(*args):
# NOT fit into a 32-bit integer. FreeLibrary must be able
# to accept this address.
- # These are tests for http://www.python.org/sf/1703286
+ # These are tests for https://www.python.org/sf/1703286
handle = LoadLibrary("advapi32")
FreeLibrary(handle)
There's a full documentation available at:
- http://docs.python.org/distutils/
+ https://docs.python.org/distutils/
The Distutils-SIG web page is also a good starting point:
- http://www.python.org/sigs/distutils-sig/
+ https://www.python.org/sigs/distutils-sig/
$Id$
if not data:
return
data = data.split()
+ if not data: # This happens for whitespace-only input.
+ return None
# The FWS after the comma after the day-of-week is optional, so search and
# adjust for this.
if data[0].endswith(',') or data[0].lower() in _daynames:
data = binascii.b2a_qp(data, istext=False, header=False, quotetabs=True)
data = data.decode('ascii')
elif cte == '7bit':
- # Make sure it really is only ASCII. The early warning here seems
- # worth the overhead...if you care write your own content manager :).
- data.encode('ascii')
+ data = data.decode('ascii')
elif cte in ('8bit', 'binary'):
data = data.decode('ascii', 'surrogateescape')
msg.set_payload(data)
if subtype in preferencelist:
yield (preferencelist.index(subtype), part)
return
- if maintype != 'multipart':
+ if maintype != 'multipart' or not self.is_multipart():
return
if subtype != 'related':
for subpart in part.iter_parts():
Return an empty iterator for a non-multipart.
"""
- if self.get_content_maintype() == 'multipart':
+ if self.is_multipart():
yield from self.get_payload()
def get_content(self, *args, content_manager=None, **kw):
def getaddresses(fieldvalues):
"""Return a list of (REALNAME, EMAIL) for each fieldvalue."""
- all = COMMASPACE.join(fieldvalues)
+ all = COMMASPACE.join(str(v) for v in fieldvalues)
a = _AddressList(all)
return a.addresslist
__all__ = ["version", "bootstrap"]
-_SETUPTOOLS_VERSION = "56.0.0"
+_SETUPTOOLS_VERSION = "57.4.0"
-_PIP_VERSION = "21.1.3"
+_PIP_VERSION = "21.2.3"
_PROJECTS = [
("setuptools", _SETUPTOOLS_VERSION, "py3"),
f2 -- Second file name
- shallow -- Just check stat signature (do not read the files).
- defaults to True.
+ shallow -- treat files as identical if their stat signatures (type, size,
+ mtime) are identical. Otherwise, files are considered different
+ if their sizes or contents differ. [default: True]
Return value:
def _gt_from_lt(self, other, NotImplemented=NotImplemented):
'Return a > b. Computed by @total_ordering from (not a < b) and (a != b).'
- op_result = self.__lt__(other)
+ op_result = type(self).__lt__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result and self != other
def _le_from_lt(self, other, NotImplemented=NotImplemented):
'Return a <= b. Computed by @total_ordering from (a < b) or (a == b).'
- op_result = self.__lt__(other)
+ op_result = type(self).__lt__(self, other)
if op_result is NotImplemented:
return op_result
return op_result or self == other
def _ge_from_lt(self, other, NotImplemented=NotImplemented):
'Return a >= b. Computed by @total_ordering from (not a < b).'
- op_result = self.__lt__(other)
+ op_result = type(self).__lt__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result
def _ge_from_le(self, other, NotImplemented=NotImplemented):
'Return a >= b. Computed by @total_ordering from (not a <= b) or (a == b).'
- op_result = self.__le__(other)
+ op_result = type(self).__le__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result or self == other
def _lt_from_le(self, other, NotImplemented=NotImplemented):
'Return a < b. Computed by @total_ordering from (a <= b) and (a != b).'
- op_result = self.__le__(other)
+ op_result = type(self).__le__(self, other)
if op_result is NotImplemented:
return op_result
return op_result and self != other
def _gt_from_le(self, other, NotImplemented=NotImplemented):
'Return a > b. Computed by @total_ordering from (not a <= b).'
- op_result = self.__le__(other)
+ op_result = type(self).__le__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result
def _lt_from_gt(self, other, NotImplemented=NotImplemented):
'Return a < b. Computed by @total_ordering from (not a > b) and (a != b).'
- op_result = self.__gt__(other)
+ op_result = type(self).__gt__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result and self != other
def _ge_from_gt(self, other, NotImplemented=NotImplemented):
'Return a >= b. Computed by @total_ordering from (a > b) or (a == b).'
- op_result = self.__gt__(other)
+ op_result = type(self).__gt__(self, other)
if op_result is NotImplemented:
return op_result
return op_result or self == other
def _le_from_gt(self, other, NotImplemented=NotImplemented):
'Return a <= b. Computed by @total_ordering from (not a > b).'
- op_result = self.__gt__(other)
+ op_result = type(self).__gt__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result
def _le_from_ge(self, other, NotImplemented=NotImplemented):
'Return a <= b. Computed by @total_ordering from (not a >= b) or (a == b).'
- op_result = self.__ge__(other)
+ op_result = type(self).__ge__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result or self == other
def _gt_from_ge(self, other, NotImplemented=NotImplemented):
'Return a > b. Computed by @total_ordering from (a >= b) and (a != b).'
- op_result = self.__ge__(other)
+ op_result = type(self).__ge__(self, other)
if op_result is NotImplemented:
return op_result
return op_result and self != other
def _lt_from_ge(self, other, NotImplemented=NotImplemented):
'Return a < b. Computed by @total_ordering from (not a >= b).'
- op_result = self.__ge__(other)
+ op_result = type(self).__ge__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result
with f.cache_info(). Clear the cache and statistics with f.cache_clear().
Access the underlying function with f.__wrapped__.
- See: http://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)
+ See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)
"""
def _c3_merge(sequences):
"""Merges MROs in *sequences* to a single MRO using the C3 algorithm.
- Adapted from http://www.python.org/download/releases/2.3/mro/.
+ Adapted from https://www.python.org/download/releases/2.3/mro/.
"""
result = []
# Mapping status codes to official W3C names
responses = {v: v.phrase for v in http.HTTPStatus.__members__.values()}
+# maximal amount of data to read at one time in _safe_read
+MAXAMOUNT = 1048576
+
# maximal line length when calling readline().
_MAXLINE = 65536
_MAXHEADERS = 100
raise IncompleteRead(bytes(b[0:total_bytes]))
def _safe_read(self, amt):
- """Read the number of bytes requested.
+ """Read the number of bytes requested, compensating for partial reads.
+
+ Normally, we have a blocking socket, but a read() can be interrupted
+ by a signal (resulting in a partial read).
+
+ Note that we cannot distinguish between EOF and an interrupt when zero
+ bytes have been read. IncompleteRead() will be raised in this
+ situation.
This function should be used when <amt> bytes "should" be present for
reading. If the bytes are truly not available (due to EOF), then the
IncompleteRead exception can be used to detect the problem.
"""
- data = self.fp.read(amt)
- if len(data) < amt:
- raise IncompleteRead(data, amt-len(data))
- return data
+ s = []
+ while amt > 0:
+ chunk = self.fp.read(min(amt, MAXAMOUNT))
+ if not chunk:
+ raise IncompleteRead(b''.join(s), amt)
+ s.append(chunk)
+ amt -= len(chunk)
+ return b"".join(s)
def _safe_readinto(self, b):
"""Same as _safe_read, but for reading into a buffer."""
- amt = len(b)
- n = self.fp.readinto(b)
- if n < amt:
- raise IncompleteRead(bytes(b[:n]), amt-n)
- return n
+ total_bytes = 0
+ mvb = memoryview(b)
+ while total_bytes < len(b):
+ if MAXAMOUNT < len(mvb):
+ temp_mvb = mvb[0:MAXAMOUNT]
+ n = self.fp.readinto(temp_mvb)
+ else:
+ n = self.fp.readinto(mvb)
+ if not n:
+ raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b))
+ mvb = mvb[n:]
+ total_bytes += n
+ return total_bytes
def read1(self, n=-1):
"""Read with at most one underlying system call. If at least one
return False
return ("WARNING: The version of Tcl/Tk ({0}) in use may"
" be unstable.\n"
- "Visit http://www.python.org/download/mac/tcltk/"
+ "Visit https://www.python.org/download/mac/tcltk/"
" for current information.".format(patchlevel))
else:
return False
callable objects.
"""
+ _get_signature_of = functools.partial(_signature_from_callable,
+ follow_wrapper_chains=follow_wrapper_chains,
+ skip_bound_arg=skip_bound_arg,
+ sigcls=sigcls)
+
if not callable(obj):
raise TypeError('{!r} is not a callable object'.format(obj))
if isinstance(obj, types.MethodType):
# In this case we skip the first parameter of the underlying
# function (usually `self` or `cls`).
- sig = _signature_from_callable(
- obj.__func__,
- follow_wrapper_chains=follow_wrapper_chains,
- skip_bound_arg=skip_bound_arg,
- sigcls=sigcls)
+ sig = _get_signature_of(obj.__func__)
if skip_bound_arg:
return _signature_bound_method(sig)
# If the unwrapped object is a *method*, we might want to
# skip its first parameter (self).
# See test_signature_wrapped_bound_method for details.
- return _signature_from_callable(
- obj,
- follow_wrapper_chains=follow_wrapper_chains,
- skip_bound_arg=skip_bound_arg,
- sigcls=sigcls)
+ return _get_signature_of(obj)
try:
sig = obj.__signature__
# (usually `self`, or `cls`) will not be passed
# automatically (as for boundmethods)
- wrapped_sig = _signature_from_callable(
- partialmethod.func,
- follow_wrapper_chains=follow_wrapper_chains,
- skip_bound_arg=skip_bound_arg,
- sigcls=sigcls)
+ wrapped_sig = _get_signature_of(partialmethod.func)
sig = _signature_get_partial(wrapped_sig, partialmethod, (None,))
first_wrapped_param = tuple(wrapped_sig.parameters.values())[0]
skip_bound_arg=skip_bound_arg)
if isinstance(obj, functools.partial):
- wrapped_sig = _signature_from_callable(
- obj.func,
- follow_wrapper_chains=follow_wrapper_chains,
- skip_bound_arg=skip_bound_arg,
- sigcls=sigcls)
+ wrapped_sig = _get_signature_of(obj.func)
return _signature_get_partial(wrapped_sig, obj)
sig = None
# in its metaclass
call = _signature_get_user_defined_method(type(obj), '__call__')
if call is not None:
- sig = _signature_from_callable(
- call,
- follow_wrapper_chains=follow_wrapper_chains,
- skip_bound_arg=skip_bound_arg,
- sigcls=sigcls)
+ sig = _get_signature_of(call)
else:
- # Now we check if the 'obj' class has a '__new__' method
+ factory_method = None
new = _signature_get_user_defined_method(obj, '__new__')
- if new is not None:
- sig = _signature_from_callable(
- new,
- follow_wrapper_chains=follow_wrapper_chains,
- skip_bound_arg=skip_bound_arg,
- sigcls=sigcls)
- else:
- # Finally, we should have at least __init__ implemented
- init = _signature_get_user_defined_method(obj, '__init__')
- if init is not None:
- sig = _signature_from_callable(
- init,
- follow_wrapper_chains=follow_wrapper_chains,
- skip_bound_arg=skip_bound_arg,
- sigcls=sigcls)
+ init = _signature_get_user_defined_method(obj, '__init__')
+ # Now we check if the 'obj' class has an own '__new__' method
+ if '__new__' in obj.__dict__:
+ factory_method = new
+ # or an own '__init__' method
+ elif '__init__' in obj.__dict__:
+ factory_method = init
+ # If not, we take inherited '__new__' or '__init__', if present
+ elif new is not None:
+ factory_method = new
+ elif init is not None:
+ factory_method = init
+
+ if factory_method is not None:
+ sig = _get_signature_of(factory_method)
if sig is None:
# At this point we know, that `obj` is a class, with no user-
call = _signature_get_user_defined_method(type(obj), '__call__')
if call is not None:
try:
- sig = _signature_from_callable(
- call,
- follow_wrapper_chains=follow_wrapper_chains,
- skip_bound_arg=skip_bound_arg,
- sigcls=sigcls)
+ sig = _get_signature_of(call)
except ValueError as ex:
msg = 'no signature found for {!r}'.format(obj)
raise ValueError(msg) from ex
stashed = tok
continue
- if token == 'def':
+ if token in ('def', 'for'):
if (stashed
and stashed[0] == NAME
and stashed[1] == 'async'):
- async_def = True
- async_def_indent = indents[-1]
+ if token == 'def':
+ async_def = True
+ async_def_indent = indents[-1]
yield (ASYNC, stashed[1],
stashed[2], stashed[3],
s = a[-5:]
s = a[:-1]
s = a[-4:-3]
- # A rough test of SF bug 1333982. http://python.org/sf/1333982
+ # A rough test of SF bug 1333982. https://python.org/sf/1333982
# The testing here is fairly incomplete.
# Test cases should include: commas with 1 and 2 colons
d = {}
s = a[-5:]
s = a[:-1]
s = a[-4:-3]
- # A rough test of SF bug 1333982. http://python.org/sf/1333982
+ # A rough test of SF bug 1333982. https://python.org/sf/1333982
# The testing here is fairly incomplete.
# Test cases should include: commas with 1 and 2 colons
d = {}
self.validate("""await = 1""")
self.validate("""def async(): pass""")
- def test_async_with(self):
+ def test_async_for(self):
self.validate("""async def foo():
async for a in b: pass""")
- self.invalid_syntax("""def foo():
- async for a in b: pass""")
-
- def test_async_for(self):
+ def test_async_with(self):
self.validate("""async def foo():
async with a: pass""")
self.invalid_syntax("""def foo():
async with a: pass""")
+ def test_async_generator(self):
+ self.validate(
+ """async def foo():
+ return (i * 2 async for i in arange(42))"""
+ )
+ self.validate(
+ """def foo():
+ return (i * 2 async for i in arange(42))"""
+ )
+
class TestRaiseChanges(GrammarTest):
def test_2x_style_1(self):
dirName, baseName = os.path.split(self.baseFilename)
fileNames = os.listdir(dirName)
result = []
- prefix = baseName + "."
+ # See bpo-44753: Don't use the extension when computing the prefix.
+ prefix = os.path.splitext(baseName)[0] + "."
plen = len(prefix)
for fileName in fileNames:
if fileName[:plen] == prefix:
# Licensed to PSF under a Contributor Agreement.
#
-__all__ = [ 'BaseManager', 'SyncManager', 'BaseProxy', 'Token',
- 'SharedMemoryManager' ]
+__all__ = [ 'BaseManager', 'SyncManager', 'BaseProxy', 'Token' ]
#
# Imports
from . import get_context
try:
from . import shared_memory
- HAS_SHMEM = True
except ImportError:
HAS_SHMEM = False
+else:
+ HAS_SHMEM = True
+ __all__.append('SharedMemoryManager')
#
# Register some things for pickling
def AutoProxy(token, serializer, manager=None, authkey=None,
- exposed=None, incref=True):
+ exposed=None, incref=True, manager_owned=False):
'''
Return an auto-proxy for `token`
'''
ProxyType = MakeProxyType('AutoProxy[%s]' % token.typeid, exposed)
proxy = ProxyType(token, serializer, manager=manager, authkey=authkey,
- incref=incref)
+ incref=incref, manager_owned=manager_owned)
proxy._isauto = True
return proxy
return b in a
def countOf(a, b):
- "Return the number of times b occurs in a."
+ "Return the number of items in a which are, or which equal, b."
count = 0
for i in a:
- if i == b:
+ if i is b or i == b:
count += 1
return count
def indexOf(a, b):
"Return the first index of b in a."
for i, j in enumerate(a):
- if j == b:
+ if j is b or j == b:
return i
else:
raise ValueError('sequence.index(x): x not in sequence')
ext_namespace_prefix = '\\\\?\\'
reserved_names = (
- {'CON', 'PRN', 'AUX', 'NUL'} |
- {'COM%d' % i for i in range(1, 10)} |
- {'LPT%d' % i for i in range(1, 10)}
+ {'CON', 'PRN', 'AUX', 'NUL', 'CONIN$', 'CONOUT$'} |
+ {'COM%s' % c for c in '123456789\xb9\xb2\xb3'} |
+ {'LPT%s' % c for c in '123456789\xb9\xb2\xb3'}
)
# Interesting findings about extended paths:
- # - '\\?\c:\a', '//?/c:\a' and '//?/c:/a' are all supported
- # but '\\?\c:/a' is not
- # - extended paths are always absolute; "relative" extended paths will
- # fail.
+ # * '\\?\c:\a' is an extended path, which bypasses normal Windows API
+ # path processing. Thus relative paths are not resolved and slash is not
+ # translated to backslash. It has the native NT path limit of 32767
+ # characters, but a bit less after resolving device symbolic links,
+ # such as '\??\C:' => '\Device\HarddiskVolume2'.
+ # * '\\?\c:/a' looks for a device named 'C:/a' because slash is a
+ # regular name character in the object namespace.
+ # * '\\?\c:\foo/bar' is invalid because '/' is illegal in NT filesystems.
+ # The only path separator at the filesystem level is backslash.
+ # * '//?/c:\a' and '//?/c:/a' are effectively equivalent to '\\.\c:\a' and
+ # thus limited to MAX_PATH.
+ # * Prior to Windows 8, ANSI API bytes paths are limited to MAX_PATH,
+ # even with the '\\?\' prefix.
def splitroot(self, part, sep=sep):
first = part[0:1]
def is_reserved(self, parts):
# NOTE: the rules for reserved names seem somewhat complicated
- # (e.g. r"..\NUL" is reserved but not r"foo\NUL").
- # We err on the side of caution and return True for paths which are
- # not considered reserved by Windows.
+ # (e.g. r"..\NUL" is reserved but not r"foo\NUL" if "foo" does not
+ # exist). We err on the side of caution and return True for paths
+ # which are not considered reserved by Windows.
if not parts:
return False
if parts[0].startswith('\\\\'):
# UNC paths are never reserved
return False
- return parts[-1].partition('.')[0].upper() in self.reserved_names
+ name = parts[-1].partition('.')[0].partition(':')[0].rstrip(' ')
+ return name.upper() in self.reserved_names
def make_uri(self, path):
# Under Windows, file URIs use the UTF-8 encoding.
if arg:
import shlex
argv0 = sys.argv[0:1]
- sys.argv = shlex.split(arg)
+ try:
+ sys.argv = shlex.split(arg)
+ except ValueError as e:
+ self.error('Cannot run %s: %s' % (arg, e))
+ return
sys.argv[:0] = argv0
# this is caught in the main debugger loop
raise Restart
print('Error:', mainpyfile, 'does not exist')
sys.exit(1)
+ if run_as_module:
+ import runpy
+ try:
+ runpy._get_module_details(mainpyfile)
+ except Exception:
+ traceback.print_exc()
+ sys.exit(1)
+
sys.argv[:] = args # Hide "pdb.py" and pdb options from argument list
if not run_as_module:
for cmd in ('ver', 'command /c ver', 'cmd /c ver'):
try:
info = subprocess.check_output(cmd,
+ stdin=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
text=True,
shell=True)
initial_slashes = path.startswith(sep)
# POSIX allows one or two initial slashes, but treats three or more
# as single slash.
+ # (see http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap04.html#tag_04_13)
if (initial_slashes and
path.startswith(sep*2) and not path.startswith(sep*3)):
initial_slashes = 2
url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif pep:
- url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
+ url = 'https://www.python.org/dev/peps/pep-%04d/' % int(pep)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif selfdot:
# Create a link for methods like 'self.method(...)'
def tempfilepager(text, cmd):
"""Page through text by invoking a program on a temporary file."""
import tempfile
- filename = tempfile.mktemp()
- with open(filename, 'w', errors='backslashreplace') as file:
- file.write(text)
- try:
+ with tempfile.TemporaryDirectory() as tempdir:
+ filename = os.path.join(tempdir, 'pydoc.out')
+ with open(filename, 'w', errors='backslashreplace',
+ encoding=os.device_encoding(0) if
+ sys.platform == 'win32' else None
+ ) as file:
+ file.write(text)
os.system(cmd + ' "' + filename + '"')
- finally:
- os.unlink(filename)
def _escape_stdout(text):
# Escape non-encodable characters to avoid encoding errors later
# -*- coding: utf-8 -*-
-# Autogenerated by Sphinx on Mon Jun 28 10:13:28 2021
+# Autogenerated by Sphinx on Mon Aug 30 20:40:44 2021
topics = {'assert': 'The "assert" statement\n'
'**********************\n'
'\n'
'In the latter case, sequence repetition is performed; a negative\n'
'repetition factor yields an empty sequence.\n'
'\n'
+ 'This operation can be customized using the special "__mul__()" '
+ 'and\n'
+ '"__rmul__()" methods.\n'
+ '\n'
'The "@" (at) operator is intended to be used for matrix\n'
'multiplication. No builtin Python types implement this operator.\n'
'\n'
'result. Division by zero raises the "ZeroDivisionError" '
'exception.\n'
'\n'
+ 'This operation can be customized using the special "__truediv__()" '
+ 'and\n'
+ '"__floordiv__()" methods.\n'
+ '\n'
'The "%" (modulo) operator yields the remainder from the division '
'of\n'
'the first argument by the second. The numeric arguments are '
'string formatting is described in the Python Library Reference,\n'
'section printf-style String Formatting.\n'
'\n'
+ 'The *modulo* operation can be customized using the special '
+ '"__mod__()"\n'
+ 'method.\n'
+ '\n'
'The floor division operator, the modulo operator, and the '
'"divmod()"\n'
'function are not defined for complex numbers. Instead, convert to '
'and then added together. In the latter case, the sequences are\n'
'concatenated.\n'
'\n'
+ 'This operation can be customized using the special "__add__()" '
+ 'and\n'
+ '"__radd__()" methods.\n'
+ '\n'
'The "-" (subtraction) operator yields the difference of its '
'arguments.\n'
- 'The numeric arguments are first converted to a common type.\n',
+ 'The numeric arguments are first converted to a common type.\n'
+ '\n'
+ 'This operation can be customized using the special "__sub__()" '
+ 'method.\n',
'bitwise': 'Binary bitwise operations\n'
'*************************\n'
'\n'
'\n'
'The "&" operator yields the bitwise AND of its arguments, which '
'must\n'
- 'be integers.\n'
+ 'be integers or one of them must be a custom object overriding\n'
+ '"__and__()" or "__rand__()" special methods.\n'
'\n'
'The "^" operator yields the bitwise XOR (exclusive OR) of its\n'
- 'arguments, which must be integers.\n'
+ 'arguments, which must be integers or one of them must be a '
+ 'custom\n'
+ 'object overriding "__xor__()" or "__rxor__()" special methods.\n'
'\n'
'The "|" operator yields the bitwise (inclusive) OR of its '
'arguments,\n'
- 'which must be integers.\n',
+ 'which must be integers or one of them must be a custom object\n'
+ 'overriding "__or__()" or "__ror__()" special methods.\n',
'bltin-code-objects': 'Code Objects\n'
'************\n'
'\n'
' comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n'
' | "is" ["not"] | ["not"] "in"\n'
'\n'
- 'Comparisons yield boolean values: "True" or "False".\n'
+ 'Comparisons yield boolean values: "True" or "False". Custom '
+ '*rich\n'
+ 'comparison methods* may return non-boolean values. In this '
+ 'case Python\n'
+ 'will call "bool()" on such value in boolean contexts.\n'
'\n'
'Comparisons can be chained arbitrarily, e.g., "x < y <= z" '
'is\n'
'"ZeroDivisionError".\n'
'Raising a negative number to a fractional power results in a '
'"complex"\n'
- 'number. (In earlier versions it raised a "ValueError".)\n',
+ 'number. (In earlier versions it raised a "ValueError".)\n'
+ '\n'
+ 'This operation can be customized using the special "__pow__()" '
+ 'method.\n',
'raise': 'The "raise" statement\n'
'*********************\n'
'\n'
'the\n'
'second argument.\n'
'\n'
+ 'This operation can be customized using the special '
+ '"__lshift__()" and\n'
+ '"__rshift__()" methods.\n'
+ '\n'
'A right shift by *n* bits is defined as floor division by '
'"pow(2,n)".\n'
'A left shift by *n* bits is defined as multiplication with '
'*start* and\n'
' *end* are interpreted as in slice notation.\n'
'\n'
- "str.encode(encoding='utf-8', errors='strict')\n"
+ 'str.encode(encoding="utf-8", errors="strict")\n'
'\n'
' Return an encoded version of the string as a bytes '
'object. Default\n'
'followed by\n'
' the string itself.\n'
'\n'
- 'str.rsplit(sep=None, maxsplit=- 1)\n'
+ 'str.rsplit(sep=None, maxsplit=-1)\n'
'\n'
' Return a list of the words in the string, using *sep* '
'as the\n'
" >>> 'Monty Python'.removesuffix(' Python')\n"
" 'Monty'\n"
'\n'
- 'str.split(sep=None, maxsplit=- 1)\n'
+ 'str.split(sep=None, maxsplit=-1)\n'
'\n'
' Return a list of the words in the string, using *sep* '
'as the\n'
' points. All the code points in the range "U+0000 - '
'U+10FFFF"\n'
' can be represented in a string. Python doesn’t have a '
- '*char*\n'
+ '"char"\n'
' type; instead, every code point in the string is '
'represented\n'
' as a string object with length "1". The built-in '
'| | "s[i:i] = '
'[x]") | |\n'
'+--------------------------------+----------------------------------+-----------------------+\n'
- '| "s.pop([i])" | retrieves the item at *i* '
+ '| "s.pop()" or "s.pop(i)" | retrieves the item at *i* '
'and | (2) |\n'
'| | also removes it from '
'*s* | |\n'
'| | "s[i:i] = '
'[x]") | |\n'
'+--------------------------------+----------------------------------+-----------------------+\n'
- '| "s.pop([i])" | retrieves the item at '
+ '| "s.pop()" or "s.pop(i)" | retrieves the item at '
'*i* and | (2) |\n'
'| | also removes it from '
'*s* | |\n'
' u_expr ::= power | "-" u_expr | "+" u_expr | "~" u_expr\n'
'\n'
'The unary "-" (minus) operator yields the negation of its numeric\n'
- 'argument.\n'
+ 'argument; the operation can be overridden with the "__neg__()" '
+ 'special\n'
+ 'method.\n'
'\n'
'The unary "+" (plus) operator yields its numeric argument '
- 'unchanged.\n'
+ 'unchanged;\n'
+ 'the operation can be overridden with the "__pos__()" special '
+ 'method.\n'
'\n'
'The unary "~" (invert) operator yields the bitwise inversion of '
'its\n'
'integer argument. The bitwise inversion of "x" is defined as\n'
- '"-(x+1)". It only applies to integral numbers.\n'
+ '"-(x+1)". It only applies to integral numbers or to custom '
+ 'objects\n'
+ 'that override the "__invert__()" special method.\n'
'\n'
'In all three cases, if the argument does not have the proper type, '
'a\n'
if (word[:n] == attr and
not (noprefix and word[:n+1] == noprefix)):
match = "%s.%s" % (expr, word)
- try:
- val = getattr(thisobject, word)
- except Exception:
- pass # Include even if attribute not set
+ if isinstance(getattr(type(thisobject), word, None),
+ property):
+ # bpo-44752: thisobject.word is a method decorated by
+ # `@property`. What follows applies a postfix if
+ # thisobject.word is callable, but now we know that
+ # this is not callable (because it is a property).
+ # Also, getattr(thisobject, word) will evaluate the
+ # property method, which is not desirable.
+ matches.append(match)
+ continue
+ if (value := getattr(thisobject, word, None)) is not None:
+ matches.append(self._callable_postfix(value, match))
else:
- match = self._callable_postfix(val, match)
- matches.append(match)
+ matches.append(match)
if matches or not noprefix:
break
if noprefix == '_':
if not follow_symlinks and _islink(src):
os.symlink(os.readlink(src), dst)
else:
- with open(src, 'rb') as fsrc, open(dst, 'wb') as fdst:
- # macOS
- if _HAS_FCOPYFILE:
- try:
- _fastcopy_fcopyfile(fsrc, fdst, posix._COPYFILE_DATA)
- return dst
- except _GiveupOnFastCopy:
- pass
- # Linux
- elif _USE_CP_SENDFILE:
- try:
- _fastcopy_sendfile(fsrc, fdst)
+ try:
+ with open(src, 'rb') as fsrc, open(dst, 'wb') as fdst:
+ # macOS
+ if _HAS_FCOPYFILE:
+ try:
+ _fastcopy_fcopyfile(fsrc, fdst, posix._COPYFILE_DATA)
+ return dst
+ except _GiveupOnFastCopy:
+ pass
+ # Linux
+ elif _USE_CP_SENDFILE:
+ try:
+ _fastcopy_sendfile(fsrc, fdst)
+ return dst
+ except _GiveupOnFastCopy:
+ pass
+ # Windows, see:
+ # https://github.com/python/cpython/pull/7160#discussion_r195405230
+ elif _WINDOWS and file_size > 0:
+ _copyfileobj_readinto(fsrc, fdst, min(file_size, COPY_BUFSIZE))
return dst
- except _GiveupOnFastCopy:
- pass
- # Windows, see:
- # https://github.com/python/cpython/pull/7160#discussion_r195405230
- elif _WINDOWS and file_size > 0:
- _copyfileobj_readinto(fsrc, fdst, min(file_size, COPY_BUFSIZE))
- return dst
-
- copyfileobj(fsrc, fdst)
+
+ copyfileobj(fsrc, fdst)
+
+ # Issue 43219, raise a less confusing exception
+ except IsADirectoryError as e:
+ if os.path.exists(dst):
+ raise
+ else:
+ raise FileNotFoundError(f'Directory does not exist: {dst}') from e
return dst
def putcmd(self, cmd, args=""):
"""Send a command to the server."""
if args == "":
- str = '%s%s' % (cmd, CRLF)
+ s = cmd
else:
- str = '%s %s%s' % (cmd, args, CRLF)
- self.send(str)
+ s = f'{cmd} {args}'
+ if '\r' in s or '\n' in s:
+ s = s.replace('\n', '\\n').replace('\r', '\\r')
+ raise ValueError(
+ f'command and arguments contain prohibited newline characters: {s}'
+ )
+ self.send(f'{s}{CRLF}')
def getreply(self):
"""Get a reply from the server.
An empty argument is interpreted as meaning the local host.
First the hostname returned by gethostbyaddr() is checked, then
- possibly existing aliases. In case no FQDN is available, hostname
- from gethostname() is returned.
+ possibly existing aliases. In case no FQDN is available and `name`
+ was given, it is returned unchanged. If `name` was empty or '0.0.0.0',
+ hostname from gethostname() is returned.
"""
name = name.strip()
if not name or name == '0.0.0.0':
self.cx.backup(bck, name='non-existing')
self.assertIn(
str(cm.exception),
- ['SQL logic error', 'SQL logic error or missing database']
+ ['SQL logic error', 'SQL logic error or missing database',
+ 'unknown database non-existing']
)
self.cx.execute("ATTACH DATABASE ':memory:' AS attached_db")
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
+import subprocess
import threading
import unittest
import sqlite3 as sqlite
+import sys
-from test.support import TESTFN, unlink
+from test.support import SHORT_TIMEOUT, TESTFN, unlink
class ModuleTests(unittest.TestCase):
sqlite.connect(':memory:', check_same_thread=False)
self.assertEqual(str(cm.exception), 'shared connections not available')
+
+class UninitialisedConnectionTests(unittest.TestCase):
+ def setUp(self):
+ self.cx = sqlite.Connection.__new__(sqlite.Connection)
+
+ def test_uninit_operations(self):
+ funcs = (
+ lambda: self.cx.isolation_level,
+ lambda: self.cx.total_changes,
+ lambda: self.cx.in_transaction,
+ lambda: self.cx.iterdump(),
+ lambda: self.cx.cursor(),
+ lambda: self.cx.close(),
+ )
+ for func in funcs:
+ with self.subTest(func=func):
+ self.assertRaisesRegex(sqlite.ProgrammingError,
+ "Base Connection.__init__ not called",
+ func)
+
+
class CursorTests(unittest.TestCase):
def setUp(self):
self.cx = sqlite.connect(":memory:")
def __init__(self):
self.value = 5
+ def __iter__(self):
+ return self
+
def __next__(self):
if self.value == 10:
raise StopIteration
self.assertEqual(self.cu.fetchall(), [('Very different data!', 'foo')])
+class MultiprocessTests(unittest.TestCase):
+ CONNECTION_TIMEOUT = SHORT_TIMEOUT / 1000. # Defaults to 30 ms
+
+ def tearDown(self):
+ unlink(TESTFN)
+
+ def test_ctx_mgr_rollback_if_commit_failed(self):
+ # bpo-27334: ctx manager does not rollback if commit fails
+ SCRIPT = f"""if 1:
+ import sqlite3
+ def wait():
+ print("started")
+ assert "database is locked" in input()
+
+ cx = sqlite3.connect("{TESTFN}", timeout={self.CONNECTION_TIMEOUT})
+ cx.create_function("wait", 0, wait)
+ with cx:
+ cx.execute("create table t(t)")
+ try:
+ # execute two transactions; both will try to lock the db
+ cx.executescript('''
+ -- start a transaction and wait for parent
+ begin transaction;
+ select * from t;
+ select wait();
+ rollback;
+
+ -- start a new transaction; would fail if parent holds lock
+ begin transaction;
+ select * from t;
+ rollback;
+ ''')
+ finally:
+ cx.close()
+ """
+
+ # spawn child process
+ proc = subprocess.Popen(
+ [sys.executable, "-c", SCRIPT],
+ encoding="utf-8",
+ bufsize=0,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ )
+ self.addCleanup(proc.communicate)
+
+ # wait for child process to start
+ self.assertEqual("started", proc.stdout.readline().strip())
+
+ cx = sqlite.connect(TESTFN, timeout=self.CONNECTION_TIMEOUT)
+ try: # context manager should correctly release the db lock
+ with cx:
+ cx.execute("insert into t values('test')")
+ except sqlite.OperationalError as exc:
+ proc.stdin.write(str(exc))
+ else:
+ proc.stdin.write("no error")
+ finally:
+ cx.close()
+
+ # terminate child process
+ self.assertIsNone(proc.returncode)
+ try:
+ proc.communicate(input="end", timeout=SHORT_TIMEOUT)
+ except subprocess.TimeoutExpired:
+ proc.kill()
+ proc.communicate()
+ raise
+ self.assertEqual(proc.returncode, 0)
+
+
def suite():
module_suite = unittest.makeSuite(ModuleTests, "Check")
connection_suite = unittest.makeSuite(ConnectionTests, "Check")
closed_con_suite = unittest.makeSuite(ClosedConTests, "Check")
closed_cur_suite = unittest.makeSuite(ClosedCurTests, "Check")
on_conflict_suite = unittest.makeSuite(SqliteOnConflictTests, "Check")
+ uninit_con_suite = unittest.makeSuite(UninitialisedConnectionTests)
+ multiproc_con_suite = unittest.makeSuite(MultiprocessTests)
return unittest.TestSuite((
module_suite, connection_suite, cursor_suite, thread_suite,
constructor_suite, ext_suite, closed_con_suite, closed_cur_suite,
- on_conflict_suite,
+ on_conflict_suite, uninit_con_suite, multiproc_con_suite,
))
def test():
con = sqlite.connect(":memory:",detect_types=sqlite.PARSE_DECLTYPES)
con.execute("create table foo(bar timestamp)")
con.execute("insert into foo(bar) values (?)", (datetime.datetime.now(),))
- con.execute(SELECT)
+ con.execute(SELECT).close()
con.execute("drop table foo")
con.execute("create table foo(bar integer)")
con.execute("insert into foo(bar) values (5)")
- con.execute(SELECT)
+ con.execute(SELECT).close()
def CheckBindMutatingList(self):
# Issue41662: Crash when mutate a list of parameters during iteration.
def func_returntext():
return "foo"
+def func_returntextwithnull():
+ return "1\x002"
def func_returnunicode():
return "bar"
def func_returnint():
def finalize(self):
return self.val
+class AggrText:
+ def __init__(self):
+ self.txt = ""
+ def step(self, txt):
+ self.txt = self.txt + txt
+ def finalize(self):
+ return self.txt
+
+
class FunctionTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")
self.con.create_function("returntext", 0, func_returntext)
+ self.con.create_function("returntextwithnull", 0, func_returntextwithnull)
self.con.create_function("returnunicode", 0, func_returnunicode)
self.con.create_function("returnint", 0, func_returnint)
self.con.create_function("returnfloat", 0, func_returnfloat)
self.assertEqual(type(val), str)
self.assertEqual(val, "foo")
+ def CheckFuncReturnTextWithNullChar(self):
+ cur = self.con.cursor()
+ res = cur.execute("select returntextwithnull()").fetchone()[0]
+ self.assertEqual(type(res), str)
+ self.assertEqual(res, "1\x002")
+
def CheckFuncReturnUnicode(self):
cur = self.con.cursor()
cur.execute("select returnunicode()")
def CheckParamString(self):
cur = self.con.cursor()
- cur.execute("select isstring(?)", ("foo",))
- val = cur.fetchone()[0]
- self.assertEqual(val, 1)
+ for text in ["foo", str()]:
+ with self.subTest(text=text):
+ cur.execute("select isstring(?)", (text,))
+ val = cur.fetchone()[0]
+ self.assertEqual(val, 1)
def CheckParamInt(self):
cur = self.con.cursor()
self.con.create_aggregate("checkType", 2, AggrCheckType)
self.con.create_aggregate("checkTypes", -1, AggrCheckTypes)
self.con.create_aggregate("mysum", 1, AggrSum)
+ self.con.create_aggregate("aggtxt", 1, AggrText)
def tearDown(self):
#self.cur.close()
def CheckAggrCheckParamStr(self):
cur = self.con.cursor()
- cur.execute("select checkType('str', ?)", ("foo",))
+ cur.execute("select checkTypes('str', ?, ?)", ("foo", str()))
val = cur.fetchone()[0]
- self.assertEqual(val, 1)
+ self.assertEqual(val, 2)
def CheckAggrCheckParamInt(self):
cur = self.con.cursor()
val = cur.fetchone()[0]
self.assertEqual(val, 60)
+ def CheckAggrText(self):
+ cur = self.con.cursor()
+ for txt in ["foo", "1\x002"]:
+ with self.subTest(txt=txt):
+ cur.execute("select aggtxt(?) from test", (txt,))
+ val = cur.fetchone()[0]
+ self.assertEqual(val, txt)
+
+
class AuthorizerTests(unittest.TestCase):
@staticmethod
def authorizer_cb(action, arg1, arg2, dbname, source):
del c
p.start()
p.join()
+ gc.collect() # For PyPy or other GCs.
self.assertIs(wr(), None)
self.assertEqual(q.get(), 5)
close_queue(q)
self.assertIsInstance(outer[0], list) # Not a ListProxy
self.assertEqual(outer[-1][-1]['feed'], 3)
+ def test_nested_queue(self):
+ a = self.list() # Test queue inside list
+ a.append(self.Queue())
+ a[0].put(123)
+ self.assertEqual(a[0].get(), 123)
+ b = self.dict() # Test queue inside dict
+ b[0] = self.Queue()
+ b[0].put(456)
+ self.assertEqual(b[0].get(), 456)
+
def test_namespace(self):
n = self.Namespace()
n.name = 'Bob'
self.pool.map(identity, objs)
del objs
+ gc.collect() # For PyPy or other GCs.
time.sleep(DELTA) # let threaded cleanup code run
self.assertEqual(set(wr() for wr in refs), {None})
# With a process pool, copies of the objects are returned, check
util._finalizer_registry.clear()
def tearDown(self):
+ gc.collect() # For PyPy or other GCs.
self.assertFalse(util._finalizer_registry)
util._finalizer_registry.update(self.registry_backup)
a = Foo()
util.Finalize(a, conn.send, args=('a',))
del a # triggers callback for a
+ gc.collect() # For PyPy or other GCs.
b = Foo()
close_b = util.Finalize(b, conn.send, args=('b',))
close_b() # triggers callback for b
close_b() # does nothing because callback has already been called
del b # does nothing because callback has already been called
+ gc.collect() # For PyPy or other GCs.
c = Foo()
util.Finalize(c, conn.send, args=('c',))
--- /dev/null
+"""Used to test `get_type_hints()` on a cross-module inherited `TypedDict` class
+
+This script uses future annotations to postpone a type that won't be available
+on the module inheriting from `Foo`. The subclass in the other module should
+look something like this:
+
+ class Bar(_typed_dict_helper.Foo, total=False):
+ b: int
+"""
+
+from __future__ import annotations
+
+from typing import Optional, TypedDict
+
+OptionalIntType = Optional[int]
+
+class Foo(TypedDict):
+ a: OptionalIntType
"""
import contextlib
+import os
import sys
pass
+def test_marshal():
+ import marshal
+ o = ("a", "b", "c", 1, 2, 3)
+ payload = marshal.dumps(o)
+
+ with TestHook() as hook:
+ assertEqual(o, marshal.loads(marshal.dumps(o)))
+
+ try:
+ with open("test-marshal.bin", "wb") as f:
+ marshal.dump(o, f)
+ with open("test-marshal.bin", "rb") as f:
+ assertEqual(o, marshal.load(f))
+ finally:
+ os.unlink("test-marshal.bin")
+
+ actual = [(a[0], a[1]) for e, a in hook.seen if e == "marshal.dumps"]
+ assertSequenceEqual(actual, [(o, marshal.version)] * 2)
+
+ actual = [a[0] for e, a in hook.seen if e == "marshal.loads"]
+ assertSequenceEqual(actual, [payload])
+
+ actual = [e for e, a in hook.seen if e == "marshal.load"]
+ assertSequenceEqual(actual, ["marshal.load"])
+
+
def test_pickle():
import pickle
# default (see bpo-30822).
RESOURCE_NAMES = ALL_RESOURCES + ('extralargefile', 'tzdata')
+
+class Namespace(argparse.Namespace):
+ def __init__(self, **kwargs) -> None:
+ self.testdir = None
+ self.verbose = 0
+ self.quiet = False
+ self.exclude = False
+ self.single = False
+ self.randomize = False
+ self.fromfile = None
+ self.findleaks = 1
+ self.fail_env_changed = False
+ self.use_resources = None
+ self.trace = False
+ self.coverdir = 'coverage'
+ self.runleaks = False
+ self.huntrleaks = False
+ self.verbose2 = False
+ self.verbose3 = False
+ self.print_slow = False
+ self.random_seed = None
+ self.use_mp = None
+ self.forever = False
+ self.header = False
+ self.failfast = False
+ self.match_tests = None
+ self.ignore_tests = None
+ self.pgo = False
+ self.pgo_extended = False
+
+ super().__init__(**kwargs)
+
+
class _ArgParser(argparse.ArgumentParser):
def error(self, message):
def _parse_args(args, **kwargs):
# Defaults
- ns = argparse.Namespace(testdir=None, verbose=0, quiet=False,
- exclude=False, single=False, randomize=False, fromfile=None,
- findleaks=1, use_resources=None, trace=False, coverdir='coverage',
- runleaks=False, huntrleaks=False, verbose2=False, print_slow=False,
- random_seed=None, use_mp=None, verbose3=False, forever=False,
- header=False, failfast=False, match_tests=None, ignore_tests=None,
- pgo=False)
+ ns = Namespace()
for k, v in kwargs.items():
if not hasattr(ns, k):
raise TypeError('%r is an invalid keyword argument '
import unittest
from test.libregrtest.cmdline import _parse_args
from test.libregrtest.runtest import (
- findtests, runtest, get_abs_module,
- STDTESTS, NOTTESTS, PASSED, FAILED, ENV_CHANGED, SKIPPED, RESOURCE_DENIED,
- INTERRUPTED, CHILD_ERROR, TEST_DID_NOT_RUN, TIMEOUT,
- PROGRESS_MIN_TIME, format_test_result, is_failed)
+ findtests, runtest, get_abs_module, is_failed,
+ STDTESTS, NOTTESTS, PROGRESS_MIN_TIME,
+ Passed, Failed, EnvChanged, Skipped, ResourceDenied, Interrupted,
+ ChildError, DidNotRun)
from test.libregrtest.setup import setup_tests
from test.libregrtest.pgo import setup_pgo_tests
from test.libregrtest.utils import removepy, count, format_duration, printlist
| set(self.run_no_tests))
def accumulate_result(self, result, rerun=False):
- test_name = result.test_name
- ok = result.result
+ test_name = result.name
- if ok not in (CHILD_ERROR, INTERRUPTED) and not rerun:
- self.test_times.append((result.test_time, test_name))
+ if not isinstance(result, (ChildError, Interrupted)) and not rerun:
+ self.test_times.append((result.duration_sec, test_name))
- if ok == PASSED:
+ if isinstance(result, Passed):
self.good.append(test_name)
- elif ok in (FAILED, CHILD_ERROR):
- if not rerun:
- self.bad.append(test_name)
- elif ok == ENV_CHANGED:
- self.environment_changed.append(test_name)
- elif ok == SKIPPED:
- self.skipped.append(test_name)
- elif ok == RESOURCE_DENIED:
+ elif isinstance(result, ResourceDenied):
self.skipped.append(test_name)
self.resource_denieds.append(test_name)
- elif ok == TEST_DID_NOT_RUN:
+ elif isinstance(result, Skipped):
+ self.skipped.append(test_name)
+ elif isinstance(result, EnvChanged):
+ self.environment_changed.append(test_name)
+ elif isinstance(result, Failed):
+ if not rerun:
+ self.bad.append(test_name)
+ self.rerun.append(result)
+ elif isinstance(result, DidNotRun):
self.run_no_tests.append(test_name)
- elif ok == INTERRUPTED:
+ elif isinstance(result, Interrupted):
self.interrupted = True
- elif ok == TIMEOUT:
- self.bad.append(test_name)
else:
- raise ValueError("invalid test result: %r" % ok)
+ raise ValueError("invalid test result: %r" % result)
- if rerun and ok not in {FAILED, CHILD_ERROR, INTERRUPTED}:
+ if rerun and not isinstance(result, (Failed, Interrupted)):
self.bad.remove(test_name)
xml_data = result.xml_data
self.log()
self.log("Re-running failed tests in verbose mode")
- self.rerun = self.bad[:]
- for test_name in self.rerun:
- self.log(f"Re-running {test_name} in verbose mode")
+ rerun_list = self.rerun[:]
+ self.rerun = []
+ for result in rerun_list:
+ test_name = result.name
+ errors = result.errors or []
+ failures = result.failures or []
+ error_names = [test_full_name.split(" ")[0] for (test_full_name, *_) in errors]
+ failure_names = [test_full_name.split(" ")[0] for (test_full_name, *_) in failures]
self.ns.verbose = True
+ orig_match_tests = self.ns.match_tests
+ if errors or failures:
+ if self.ns.match_tests is None:
+ self.ns.match_tests = []
+ self.ns.match_tests.extend(error_names)
+ self.ns.match_tests.extend(failure_names)
+ matching = "matching: " + ", ".join(self.ns.match_tests)
+ self.log(f"Re-running {test_name} in verbose mode ({matching})")
+ else:
+ self.log(f"Re-running {test_name} in verbose mode")
result = runtest(self.ns, test_name)
+ self.ns.match_tests = orig_match_tests
self.accumulate_result(result, rerun=True)
- if result.result == INTERRUPTED:
+ if isinstance(result, Interrupted):
break
if self.bad:
if self.rerun:
print()
print("%s:" % count(len(self.rerun), "re-run test"))
- printlist(self.rerun)
+ printlist(r.name for r in self.rerun)
if self.run_no_tests:
print()
result = runtest(self.ns, test_name)
self.accumulate_result(result)
- if result.result == INTERRUPTED:
+ if isinstance(result, Interrupted):
break
- previous_test = format_test_result(result)
+ previous_test = str(result)
test_time = time.monotonic() - start_time
if test_time >= PROGRESS_MIN_TIME:
previous_test = "%s in %s" % (previous_test, format_duration(test_time))
- elif result.result == PASSED:
+ elif isinstance(result, Passed):
# be quiet: say nothing if the test passed shortly
previous_test = None
-import collections
+from __future__ import annotations
+
import faulthandler
import functools
import gc
from test import support
from test.libregrtest.refleak import dash_R, clear_caches
+from test.libregrtest.cmdline import Namespace
from test.libregrtest.save_env import saved_test_environment
from test.libregrtest.utils import format_duration, print_warning
-# Test result constants.
-PASSED = 1
-FAILED = 0
-ENV_CHANGED = -1
-SKIPPED = -2
-RESOURCE_DENIED = -3
-INTERRUPTED = -4
-CHILD_ERROR = -5 # error in a child process
-TEST_DID_NOT_RUN = -6
-TIMEOUT = -7
-
-_FORMAT_TEST_RESULT = {
- PASSED: '%s passed',
- FAILED: '%s failed',
- ENV_CHANGED: '%s failed (env changed)',
- SKIPPED: '%s skipped',
- RESOURCE_DENIED: '%s skipped (resource denied)',
- INTERRUPTED: '%s interrupted',
- CHILD_ERROR: '%s crashed',
- TEST_DID_NOT_RUN: '%s run no tests',
- TIMEOUT: '%s timed out',
-}
+class TestResult:
+ def __init__(
+ self,
+ name: str,
+ duration_sec: float = 0.0,
+ xml_data: list[str] | None = None,
+ ) -> None:
+ self.name = name
+ self.duration_sec = duration_sec
+ self.xml_data = xml_data
+
+ def __str__(self) -> str:
+ return f"{self.name} finished"
+
+
+class Passed(TestResult):
+ def __str__(self) -> str:
+ return f"{self.name} passed"
+
+
+class Failed(TestResult):
+ def __init__(
+ self,
+ name: str,
+ duration_sec: float = 0.0,
+ xml_data: list[str] | None = None,
+ errors: list[tuple[str, str]] | None = None,
+ failures: list[tuple[str, str]] | None = None,
+ ) -> None:
+ super().__init__(name, duration_sec=duration_sec, xml_data=xml_data)
+ self.errors = errors
+ self.failures = failures
+
+ def __str__(self) -> str:
+ if self.errors and self.failures:
+ le = len(self.errors)
+ lf = len(self.failures)
+ error_s = "error" + ("s" if le > 1 else "")
+ failure_s = "failure" + ("s" if lf > 1 else "")
+ return f"{self.name} failed ({le} {error_s}, {lf} {failure_s})"
+
+ if self.errors:
+ le = len(self.errors)
+ error_s = "error" + ("s" if le > 1 else "")
+ return f"{self.name} failed ({le} {error_s})"
+
+ if self.failures:
+ lf = len(self.failures)
+ failure_s = "failure" + ("s" if lf > 1 else "")
+ return f"{self.name} failed ({lf} {failure_s})"
+
+ return f"{self.name} failed"
+
+
+class UncaughtException(Failed):
+ def __str__(self) -> str:
+ return f"{self.name} failed (uncaught exception)"
+
+
+class EnvChanged(Failed):
+ def __str__(self) -> str:
+ return f"{self.name} failed (env changed)"
+
+
+class RefLeak(Failed):
+ def __str__(self) -> str:
+ return f"{self.name} failed (reference leak)"
+
+
+class Skipped(TestResult):
+ def __str__(self) -> str:
+ return f"{self.name} skipped"
+
+
+class ResourceDenied(Skipped):
+ def __str__(self) -> str:
+ return f"{self.name} skipped (resource denied)"
+
+
+class Interrupted(TestResult):
+ def __str__(self) -> str:
+ return f"{self.name} interrupted"
+
+
+class ChildError(Failed):
+ def __str__(self) -> str:
+ return f"{self.name} crashed"
+
+
+class DidNotRun(TestResult):
+ def __str__(self) -> str:
+ return f"{self.name} ran no tests"
+
+
+class Timeout(Failed):
+ def __str__(self) -> str:
+ return f"{self.name} timed out ({format_duration(self.duration_sec)})"
+
# Minimum duration of a test to display its duration or to mention that
# the test is running in background
FOUND_GARBAGE = []
-def is_failed(result, ns):
- ok = result.result
- if ok in (PASSED, RESOURCE_DENIED, SKIPPED, TEST_DID_NOT_RUN):
- return False
- if ok == ENV_CHANGED:
+def is_failed(result: TestResult, ns: Namespace) -> bool:
+ if isinstance(result, EnvChanged):
return ns.fail_env_changed
- return True
-
-
-def format_test_result(result):
- fmt = _FORMAT_TEST_RESULT.get(result.result, "%s")
- text = fmt % result.test_name
- if result.result == TIMEOUT:
- text = '%s (%s)' % (text, format_duration(result.test_time))
- return text
+ return isinstance(result, Failed)
def findtestdir(path=None):
return stdtests + sorted(tests)
-def get_abs_module(ns, test_name):
+def get_abs_module(ns: Namespace, test_name: str) -> str:
if test_name.startswith('test.') or ns.testdir:
return test_name
else:
return 'test.' + test_name
-TestResult = collections.namedtuple('TestResult',
- 'test_name result test_time xml_data')
-
-def _runtest(ns, test_name):
+def _runtest(ns: Namespace, test_name: str) -> TestResult:
# Handle faulthandler timeout, capture stdout+stderr, XML serialization
# and measure time.
sys.stderr = stream
result = _runtest_inner(ns, test_name,
display_failure=False)
- if result != PASSED:
+ if not isinstance(result, Passed):
output = stream.getvalue()
orig_stderr.write(output)
orig_stderr.flush()
if xml_list:
import xml.etree.ElementTree as ET
- xml_data = [ET.tostring(x).decode('us-ascii') for x in xml_list]
- else:
- xml_data = None
+ result.xml_data = [
+ ET.tostring(x).decode('us-ascii')
+ for x in xml_list
+ ]
- test_time = time.perf_counter() - start_time
-
- return TestResult(test_name, result, test_time, xml_data)
+ result.duration_sec = time.perf_counter() - start_time
+ return result
finally:
if use_timeout:
faulthandler.cancel_dump_traceback_later()
support.junit_xml_list = None
-def runtest(ns, test_name):
+def runtest(ns: Namespace, test_name: str) -> TestResult:
"""Run a single test.
ns -- regrtest namespace of options
test_name -- the name of the test
- Returns the tuple (result, test_time, xml_data), where result is one
- of the constants:
-
- INTERRUPTED KeyboardInterrupt
- RESOURCE_DENIED test skipped because resource denied
- SKIPPED test skipped for some other reason
- ENV_CHANGED test failed because it changed the execution environment
- FAILED test failed
- PASSED test passed
- EMPTY_TEST_SUITE test ran no subtests.
- TIMEOUT test timed out.
+ Returns a TestResult sub-class depending on the kind of result received.
If ns.xmlpath is not None, xml_data is a list containing each
generated testsuite element.
msg = traceback.format_exc()
print(f"test {test_name} crashed -- {msg}",
file=sys.stderr, flush=True)
- return TestResult(test_name, FAILED, 0.0, None)
+ return Failed(test_name)
def _test_module(the_module):
support.run_unittest(tests)
-def _runtest_inner2(ns, test_name):
+def _runtest_inner2(ns: Namespace, test_name: str) -> bool:
# Load the test function, run the test function, handle huntrleaks
# and findleaks to detect leaks
return refleak
-def _runtest_inner(ns, test_name, display_failure=True):
+def _runtest_inner(
+ ns: Namespace, test_name: str, display_failure: bool = True
+) -> TestResult:
# Detect environment changes, handle exceptions.
# Reset the environment_altered flag to detect if a test altered
except support.ResourceDenied as msg:
if not ns.quiet and not ns.pgo:
print(f"{test_name} skipped -- {msg}", flush=True)
- return RESOURCE_DENIED
+ return ResourceDenied(test_name)
except unittest.SkipTest as msg:
if not ns.quiet and not ns.pgo:
print(f"{test_name} skipped -- {msg}", flush=True)
- return SKIPPED
+ return Skipped(test_name)
+ except support.TestFailedWithDetails as exc:
+ msg = f"test {test_name} failed"
+ if display_failure:
+ msg = f"{msg} -- {exc}"
+ print(msg, file=sys.stderr, flush=True)
+ return Failed(test_name, errors=exc.errors, failures=exc.failures)
except support.TestFailed as exc:
msg = f"test {test_name} failed"
if display_failure:
msg = f"{msg} -- {exc}"
print(msg, file=sys.stderr, flush=True)
- return FAILED
+ return Failed(test_name)
except support.TestDidNotRun:
- return TEST_DID_NOT_RUN
+ return DidNotRun(test_name)
except KeyboardInterrupt:
print()
- return INTERRUPTED
+ return Interrupted(test_name)
except:
if not ns.pgo:
msg = traceback.format_exc()
print(f"test {test_name} crashed -- {msg}",
file=sys.stderr, flush=True)
- return FAILED
+ return UncaughtException(test_name)
if refleak:
- return FAILED
+ return RefLeak(test_name)
if environment.changed:
- return ENV_CHANGED
- return PASSED
+ return EnvChanged(test_name)
+ return Passed(test_name)
-def cleanup_test_droppings(test_name, verbose):
+def cleanup_test_droppings(test_name: str, verbose: int) -> None:
# First kill any dangling references to open files etc.
# This can also issue some ResourceWarnings which would otherwise get
# triggered during the following test run, and possibly produce failures.
-import collections
+from __future__ import annotations
+
import faulthandler
import json
import os
import threading
import time
import traceback
-import types
+from typing import NamedTuple, NoReturn, Literal, Any, TYPE_CHECKING
+
from test import support
+from test.libregrtest.cmdline import Namespace
+from test.libregrtest.main import Regrtest
from test.libregrtest.runtest import (
- runtest, INTERRUPTED, CHILD_ERROR, PROGRESS_MIN_TIME,
- format_test_result, TestResult, is_failed, TIMEOUT)
+ runtest, is_failed, TestResult, Interrupted, Timeout, ChildError, PROGRESS_MIN_TIME)
from test.libregrtest.setup import setup_tests
from test.libregrtest.utils import format_duration, print_warning
USE_PROCESS_GROUP = (hasattr(os, "setsid") and hasattr(os, "killpg"))
-def must_stop(result, ns):
- if result.result == INTERRUPTED:
+def must_stop(result: TestResult, ns: Namespace) -> bool:
+ if isinstance(result, Interrupted):
return True
if ns.failfast and is_failed(result, ns):
return True
return False
-def parse_worker_args(worker_args):
+def parse_worker_args(worker_args) -> tuple[Namespace, str]:
ns_dict, test_name = json.loads(worker_args)
- ns = types.SimpleNamespace(**ns_dict)
+ ns = Namespace(**ns_dict)
return (ns, test_name)
-def run_test_in_subprocess(testname, ns):
+def run_test_in_subprocess(testname: str, ns: Namespace) -> subprocess.Popen:
ns_dict = vars(ns)
worker_args = (ns_dict, testname)
worker_args = json.dumps(worker_args)
**kw)
-def run_tests_worker(ns, test_name):
+def run_tests_worker(ns: Namespace, test_name: str) -> NoReturn:
setup_tests(ns)
result = runtest(ns, test_name)
print() # Force a newline (just in case)
- # Serialize TestResult as list in JSON
- print(json.dumps(list(result)), flush=True)
+ # Serialize TestResult as dict in JSON
+ print(json.dumps(result, cls=EncodeTestResult), flush=True)
sys.exit(0)
self.tests_iter = None
-MultiprocessResult = collections.namedtuple('MultiprocessResult',
- 'result stdout stderr error_msg')
+class MultiprocessResult(NamedTuple):
+ result: TestResult
+ stdout: str
+ stderr: str
+ error_msg: str
+
+
+if TYPE_CHECKING:
+ ExcStr = str
+ QueueOutput = tuple[Literal[False], MultiprocessResult] | tuple[Literal[True], ExcStr]
+
class ExitThread(Exception):
pass
class TestWorkerProcess(threading.Thread):
- def __init__(self, worker_id, runner):
+ def __init__(self, worker_id: int, runner: "MultiprocessTestRunner") -> None:
super().__init__()
self.worker_id = worker_id
self.pending = runner.pending
self._killed = False
self._stopped = False
- def __repr__(self):
+ def __repr__(self) -> str:
info = [f'TestWorkerProcess #{self.worker_id}']
if self.is_alive():
info.append("running")
f'time={format_duration(dt)}'))
return '<%s>' % ' '.join(info)
- def _kill(self):
+ def _kill(self) -> None:
popen = self._popen
if popen is None:
return
except OSError as exc:
print_warning(f"Failed to kill {what}: {exc!r}")
- def stop(self):
+ def stop(self) -> None:
# Method called from a different thread to stop this thread
self._stopped = True
self._kill()
- def mp_result_error(self, test_name, error_type, stdout='', stderr='',
- err_msg=None):
- test_time = time.monotonic() - self.start_time
- result = TestResult(test_name, error_type, test_time, None)
- return MultiprocessResult(result, stdout, stderr, err_msg)
-
- def _run_process(self, test_name):
+ def mp_result_error(
+ self,
+ test_result: TestResult,
+ stdout: str = '',
+ stderr: str = '',
+ err_msg=None
+ ) -> MultiprocessResult:
+ test_result.duration_sec = time.monotonic() - self.start_time
+ return MultiprocessResult(test_result, stdout, stderr, err_msg)
+
+ def _run_process(self, test_name: str) -> tuple[int, str, str]:
self.start_time = time.monotonic()
self.current_test_name = test_name
self._popen = None
self.current_test_name = None
- def _runtest(self, test_name):
+ def _runtest(self, test_name: str) -> MultiprocessResult:
retcode, stdout, stderr = self._run_process(test_name)
if retcode is None:
- return self.mp_result_error(test_name, TIMEOUT, stdout, stderr)
+ return self.mp_result_error(Timeout(test_name), stdout, stderr)
err_msg = None
if retcode != 0:
else:
try:
# deserialize run_tests_worker() output
- result = json.loads(result)
- result = TestResult(*result)
+ result = json.loads(result, object_hook=decode_test_result)
except Exception as exc:
err_msg = "Failed to parse worker JSON: %s" % exc
if err_msg is not None:
- return self.mp_result_error(test_name, CHILD_ERROR,
+ return self.mp_result_error(ChildError(test_name),
stdout, stderr, err_msg)
return MultiprocessResult(result, stdout, stderr, err_msg)
- def run(self):
+ def run(self) -> None:
while not self._stopped:
try:
try:
self.output.put((True, traceback.format_exc()))
break
- def _wait_completed(self):
+ def _wait_completed(self) -> None:
popen = self._popen
# stdout and stderr must be closed to ensure that communicate()
f"(timeout={format_duration(JOIN_TIMEOUT)}): "
f"{exc!r}")
- def wait_stopped(self, start_time):
+ def wait_stopped(self, start_time: float) -> None:
# bpo-38207: MultiprocessTestRunner.stop_workers() called self.stop()
# which killed the process. Sometimes, killing the process from the
# main thread does not interrupt popen.communicate() in
break
-def get_running(workers):
+def get_running(workers: list[TestWorkerProcess]) -> list[TestWorkerProcess]:
running = []
for worker in workers:
current_test_name = worker.current_test_name
class MultiprocessTestRunner:
- def __init__(self, regrtest):
+ def __init__(self, regrtest: Regrtest) -> None:
self.regrtest = regrtest
self.log = self.regrtest.log
self.ns = regrtest.ns
- self.output = queue.Queue()
+ self.output: queue.Queue[QueueOutput] = queue.Queue()
self.pending = MultiprocessIterator(self.regrtest.tests)
if self.ns.timeout is not None:
# Rely on faulthandler to kill a worker process. This timeout is
self.worker_timeout = None
self.workers = None
- def start_workers(self):
+ def start_workers(self) -> None:
self.workers = [TestWorkerProcess(index, self)
for index in range(1, self.ns.use_mp + 1)]
msg = f"Run tests in parallel using {len(self.workers)} child processes"
for worker in self.workers:
worker.start()
- def stop_workers(self):
+ def stop_workers(self) -> None:
start_time = time.monotonic()
for worker in self.workers:
worker.stop()
for worker in self.workers:
worker.wait_stopped(start_time)
- def _get_result(self):
+ def _get_result(self) -> QueueOutput | None:
if not any(worker.is_alive() for worker in self.workers):
# all worker threads are done: consume pending results
try:
if running and not self.ns.pgo:
self.log('running: %s' % ', '.join(running))
- def display_result(self, mp_result):
+ def display_result(self, mp_result: MultiprocessResult) -> None:
result = mp_result.result
- text = format_test_result(result)
+ text = str(result)
if mp_result.error_msg is not None:
# CHILD_ERROR
text += ' (%s)' % mp_result.error_msg
- elif (result.test_time >= PROGRESS_MIN_TIME and not self.ns.pgo):
- text += ' (%s)' % format_duration(result.test_time)
+ elif (result.duration_sec >= PROGRESS_MIN_TIME and not self.ns.pgo):
+ text += ' (%s)' % format_duration(result.duration_sec)
running = get_running(self.workers)
if running and not self.ns.pgo:
text += ' -- running: %s' % ', '.join(running)
self.regrtest.display_progress(self.test_index, text)
- def _process_result(self, item):
+ def _process_result(self, item: QueueOutput) -> bool:
+ """Returns True if test runner must stop."""
if item[0]:
# Thread got an exception
format_exc = item[1]
return False
- def run_tests(self):
+ def run_tests(self) -> None:
self.start_workers()
self.test_index = 0
self.stop_workers()
-def run_tests_multiprocess(regrtest):
+def run_tests_multiprocess(regrtest: Regrtest) -> None:
MultiprocessTestRunner(regrtest).run_tests()
+
+
+class EncodeTestResult(json.JSONEncoder):
+ """Encode a TestResult (sub)class object into a JSON dict."""
+
+ def default(self, o: Any) -> dict[str, Any]:
+ if isinstance(o, TestResult):
+ result = vars(o)
+ result["__test_result__"] = o.__class__.__name__
+ return result
+
+ return super().default(o)
+
+
+def decode_test_result(d: dict[str, Any]) -> TestResult | dict[str, Any]:
+ """Decode a TestResult (sub)class object from a JSON dict."""
+
+ if "__test_result__" not in d:
+ return d
+
+ cls_name = d.pop("__test_result__")
+ for cls in get_all_test_result_classes():
+ if cls.__name__ == cls_name:
+ return cls(**d)
+
+
+def get_all_test_result_classes() -> set[type[TestResult]]:
+ prev_count = 0
+ classes = {TestResult}
+ while len(classes) > prev_count:
+ prev_count = len(classes)
+ to_add = []
+ for cls in classes:
+ to_add.extend(cls.__subclasses__())
+ classes.update(to_add)
+ return classes
"""
import os
+import gc
import sys
import time
from _thread import start_new_thread, TIMEOUT_MAX
lock = self.locktype()
ref = weakref.ref(lock)
del lock
+ gc.collect() # For PyPy or other GCs.
self.assertIsNone(ref())
class TestFailed(Error):
"""Test failed."""
+class TestFailedWithDetails(TestFailed):
+ """Test failed."""
+ def __init__(self, msg, errors, failures):
+ self.msg = msg
+ self.errors = errors
+ self.failures = failures
+ super().__init__(msg, errors, failures)
+
+ def __str__(self):
+ return self.msg
+
class TestDidNotRun(Error):
"""Test did not run any subtests."""
# TEST_DATA_DIR is used as a target download location for remote resources
TEST_DATA_DIR = os.path.join(TEST_HOME_DIR, "data")
+
+def darwin_malloc_err_warning(test_name):
+ """Assure user that loud errors generated by macOS libc's malloc are
+ expected."""
+ if sys.platform != 'darwin':
+ return
+
+ import shutil
+ msg = ' NOTICE '
+ detail = (f'{test_name} may generate "malloc can\'t allocate region"\n'
+ 'warnings on macOS systems. This behavior is known. Do not\n'
+ 'report a bug unless tests are also failing. See bpo-40928.')
+
+ padding, _ = shutil.get_terminal_size()
+ print(msg.center(padding, '-'))
+ print(detail)
+ print('-' * padding)
+
+
def findfile(filename, subdir=None):
"""Try to find a file on sys.path or in the test directory. If it is not
found the argument passed to the function is returned (this does not
else:
err = "multiple errors occurred"
if not verbose: err += "; run in verbose mode for details"
- raise TestFailed(err)
+ errors = [(str(tc), exc_str) for tc, exc_str in result.errors]
+ failures = [(str(tc), exc_str) for tc, exc_str in result.failures]
+ raise TestFailedWithDetails(err, errors, failures)
# By default, don't filter tests
yield
finally:
sys.setrecursionlimit(original_depth)
+
+def ignore_deprecations_from(module: str, *, like: str) -> object:
+ token = object()
+ warnings.filterwarnings(
+ "ignore",
+ category=DeprecationWarning,
+ module=module,
+ message=like + fr"(?#support{id(token)})",
+ )
+ return token
+
+def clear_ignored_deprecations(*tokens: object) -> None:
+ if not tokens:
+ raise ValueError("Provide token or tokens returned by ignore_deprecations_from")
+
+ new_filters = []
+ endswith = tuple(rf"(?#support{id(token)})" for token in tokens)
+ for action, message, category, module, lineno in warnings.filters:
+ if action == "ignore" and category is DeprecationWarning:
+ if isinstance(message, re.Pattern):
+ msg = message.pattern
+ else:
+ msg = message or ""
+ if msg.endswith(endswith):
+ continue
+ new_filters.append((action, message, category, module, lineno))
+ if warnings.filters != new_filters:
+ warnings.filters[:] = new_filters
+ warnings._filters_mutated()
--- /dev/null
+import contextlib
+import functools
+import re
+import sys
+import warnings
+
+
+def check_syntax_warning(testcase, statement, errtext='',
+ *, lineno=1, offset=None):
+ # Test also that a warning is emitted only once.
+ from test.support import check_syntax_error
+ with warnings.catch_warnings(record=True) as warns:
+ warnings.simplefilter('always', SyntaxWarning)
+ compile(statement, '<testcase>', 'exec')
+ testcase.assertEqual(len(warns), 1, warns)
+
+ warn, = warns
+ testcase.assertTrue(issubclass(warn.category, SyntaxWarning),
+ warn.category)
+ if errtext:
+ testcase.assertRegex(str(warn.message), errtext)
+ testcase.assertEqual(warn.filename, '<testcase>')
+ testcase.assertIsNotNone(warn.lineno)
+ if lineno is not None:
+ testcase.assertEqual(warn.lineno, lineno)
+
+ # SyntaxWarning should be converted to SyntaxError when raised,
+ # since the latter contains more information and provides better
+ # error report.
+ with warnings.catch_warnings(record=True) as warns:
+ warnings.simplefilter('error', SyntaxWarning)
+ check_syntax_error(testcase, statement, errtext,
+ lineno=lineno, offset=offset)
+ # No warnings are leaked when a SyntaxError is raised.
+ testcase.assertEqual(warns, [])
+
+
+def ignore_warnings(*, category):
+ """Decorator to suppress deprecation warnings.
+
+ Use of context managers to hide warnings make diffs
+ more noisy and tools like 'git blame' less useful.
+ """
+ def decorator(test):
+ @functools.wraps(test)
+ def wrapper(self, *args, **kwargs):
+ with warnings.catch_warnings():
+ warnings.simplefilter('ignore', category=category)
+ return test(self, *args, **kwargs)
+ return wrapper
+ return decorator
+
+
+class WarningsRecorder(object):
+ """Convenience wrapper for the warnings list returned on
+ entry to the warnings.catch_warnings() context manager.
+ """
+ def __init__(self, warnings_list):
+ self._warnings = warnings_list
+ self._last = 0
+
+ def __getattr__(self, attr):
+ if len(self._warnings) > self._last:
+ return getattr(self._warnings[-1], attr)
+ elif attr in warnings.WarningMessage._WARNING_DETAILS:
+ return None
+ raise AttributeError("%r has no attribute %r" % (self, attr))
+
+ @property
+ def warnings(self):
+ return self._warnings[self._last:]
+
+ def reset(self):
+ self._last = len(self._warnings)
+
+
+def check_warnings(*filters, **kwargs):
+ """Context manager to silence warnings.
+
+ Accept 2-tuples as positional arguments:
+ ("message regexp", WarningCategory)
+
+ Optional argument:
+ - if 'quiet' is True, it does not fail if a filter catches nothing
+ (default True without argument,
+ default False if some filters are defined)
+
+ Without argument, it defaults to:
+ check_warnings(("", Warning), quiet=True)
+ """
+ quiet = kwargs.get('quiet')
+ if not filters:
+ filters = (("", Warning),)
+ # Preserve backward compatibility
+ if quiet is None:
+ quiet = True
+ return _filterwarnings(filters, quiet)
+
+
+@contextlib.contextmanager
+def check_no_warnings(testcase, message='', category=Warning, force_gc=False):
+ """Context manager to check that no warnings are emitted.
+
+ This context manager enables a given warning within its scope
+ and checks that no warnings are emitted even with that warning
+ enabled.
+
+ If force_gc is True, a garbage collection is attempted before checking
+ for warnings. This may help to catch warnings emitted when objects
+ are deleted, such as ResourceWarning.
+
+ Other keyword arguments are passed to warnings.filterwarnings().
+ """
+ from test.support import gc_collect
+ with warnings.catch_warnings(record=True) as warns:
+ warnings.filterwarnings('always',
+ message=message,
+ category=category)
+ yield
+ if force_gc:
+ gc_collect()
+ testcase.assertEqual(warns, [])
+
+
+@contextlib.contextmanager
+def check_no_resource_warning(testcase):
+ """Context manager to check that no ResourceWarning is emitted.
+
+ Usage:
+
+ with check_no_resource_warning(self):
+ f = open(...)
+ ...
+ del f
+
+ You must remove the object which may emit ResourceWarning before
+ the end of the context manager.
+ """
+ with check_no_warnings(testcase, category=ResourceWarning, force_gc=True):
+ yield
+
+
+@contextlib.contextmanager
+def _filterwarnings(filters, quiet=False):
+ """Catch the warnings, then check if all the expected
+ warnings have been raised and re-raise unexpected warnings.
+ If 'quiet' is True, only re-raise the unexpected warnings.
+ """
+ # Clear the warning registry of the calling module
+ # in order to re-raise the warnings.
+ frame = sys._getframe(2)
+ registry = frame.f_globals.get('__warningregistry__')
+ if registry:
+ registry.clear()
+ with warnings.catch_warnings(record=True) as w:
+ # Set filter "always" to record all warnings. Because
+ # test_warnings swap the module, we need to look up in
+ # the sys.modules dictionary.
+ sys.modules['warnings'].simplefilter("always")
+ yield WarningsRecorder(w)
+ # Filter the recorded warnings
+ reraise = list(w)
+ missing = []
+ for msg, cat in filters:
+ seen = False
+ for w in reraise[:]:
+ warning = w.message
+ # Filter out the matching messages
+ if (re.match(msg, str(warning), re.I) and
+ issubclass(warning.__class__, cat)):
+ seen = True
+ reraise.remove(w)
+ if not seen and not quiet:
+ # This filter caught nothing
+ missing.append((msg, cat.__name__))
+ if reraise:
+ raise AssertionError("unhandled warning %s" % reraise[0])
+ if missing:
+ raise AssertionError("filter (%r, %s) did not catch any warning" %
+ missing[0])
+
+
+@contextlib.contextmanager
+def save_restore_warnings_filters():
+ old_filters = warnings.filters[:]
+ try:
+ yield
+ finally:
+ warnings.filters[:] = old_filters
+
+
+def _warn_about_deprecation():
+ warnings.warn(
+ "This is used in test_support test to ensure"
+ " support.ignore_deprecations_from() works as expected."
+ " You should not be seeing this.",
+ DeprecationWarning,
+ stacklevel=0,
+ )
return rpipe.read()
+def _wait_for_interp_to_run(interp, timeout=None):
+ # bpo-37224: Running this test file in multiple processes will fail randomly.
+ # The failure reason is that the thread can't acquire the CPU to
+ # run the subinterpreter earlier than the main thread in multiprocess mode.
+ if timeout is None:
+ timeout = support.SHORT_TIMEOUT
+ start_time = time.monotonic()
+ deadline = start_time + timeout
+ while not interpreters.is_running(interp):
+ if time.monotonic() > deadline:
+ raise RuntimeError('interp is not running')
+ time.sleep(0.010)
+
+
@contextlib.contextmanager
def _running(interp):
r, w = os.pipe()
t = threading.Thread(target=run)
t.start()
+ _wait_for_interp_to_run(interp)
yield
ret = parser.parse_args(())
self.assertIsNone(ret.command)
+ def test_required_subparsers_no_destination_error(self):
+ parser = ErrorRaisingArgumentParser()
+ subparsers = parser.add_subparsers(required=True)
+ subparsers.add_parser('foo')
+ subparsers.add_parser('bar')
+ with self.assertRaises(ArgumentParserError) as excinfo:
+ parser.parse_args(())
+ self.assertRegex(
+ excinfo.exception.stderr,
+ 'error: the following arguments are required: {foo,bar}\n$'
+ )
+
+ def test_wrong_argument_subparsers_no_destination_error(self):
+ parser = ErrorRaisingArgumentParser()
+ subparsers = parser.add_subparsers(required=True)
+ subparsers.add_parser('foo')
+ subparsers.add_parser('bar')
+ with self.assertRaises(ArgumentParserError) as excinfo:
+ parser.parse_args(('baz',))
+ self.assertRegex(
+ excinfo.exception.stderr,
+ r"error: argument {foo,bar}: invalid choice: 'baz' \(choose from 'foo', 'bar'\)\n$"
+ )
+
def test_optional_subparsers(self):
parser = ErrorRaisingArgumentParser()
subparsers = parser.add_subparsers(dest='command', required=False)
argument_signatures = [
Sig('--foo', help='foo help - oh and by the way, %(default)s'),
Sig('--bar', action='store_true', help='bar help'),
+ Sig('--taz', action=argparse.BooleanOptionalAction,
+ help='Whether to taz it', default=True),
+ Sig('--quux', help="Set the quux", default=42),
Sig('spam', help='spam help'),
Sig('badger', nargs='?', default='wooden', help='badger help'),
]
[Sig('--baz', type=int, default=42, help='baz help')]),
]
usage = '''\
- usage: PROG [-h] [--foo FOO] [--bar] [--baz BAZ] spam [badger]
+ usage: PROG [-h] [--foo FOO] [--bar] [--taz | --no-taz] [--quux QUUX]
+ [--baz BAZ]
+ spam [badger]
'''
help = usage + '''\
description
positional arguments:
- spam spam help
- badger badger help (default: wooden)
+ spam spam help
+ badger badger help (default: wooden)
optional arguments:
- -h, --help show this help message and exit
- --foo FOO foo help - oh and by the way, None
- --bar bar help (default: False)
+ -h, --help show this help message and exit
+ --foo FOO foo help - oh and by the way, None
+ --bar bar help (default: False)
+ --taz, --no-taz Whether to taz it (default: True)
+ --quux QUUX Set the quux (default: 42)
title:
description
- --baz BAZ baz help (default: 42)
+ --baz BAZ baz help (default: 42)
'''
version = ''
p = weakref.proxy(s)
self.assertEqual(p.tobytes(), s.tobytes())
s = None
+ support.gc_collect() # For PyPy or other GCs.
self.assertRaises(ReferenceError, len, p)
@unittest.skipUnless(hasattr(sys, 'getrefcount'),
import unittest
from test.support import import_module
+from test.support import gc_collect
asyncio = import_module("asyncio")
await g.__anext__()
await g.__anext__()
del g
+ gc_collect() # For PyPy or other GCs.
await asyncio.sleep(0.1)
import os
-from test.support import load_package_tests, import_module
+from test import support
+import unittest
# Skip tests if we don't have concurrent.futures.
-import_module('concurrent.futures')
+support.import_module('concurrent.futures')
-def load_tests(*args):
- return load_package_tests(os.path.dirname(__file__), *args)
+
+def load_tests(loader, _, pattern):
+ pkg_dir = os.path.dirname(__file__)
+ suite = AsyncioTestSuite()
+ return support.load_package_tests(pkg_dir, loader, suite, pattern)
+
+
+class AsyncioTestSuite(unittest.TestSuite):
+ """A custom test suite that also runs setup/teardown for the whole package.
+
+ Normally unittest only runs setUpModule() and tearDownModule() within each
+ test module that is part of the test suite. Copying those functions to each
+ file would be tedious; instead, run them once for the whole package.
+ """
+ def run(self, result, debug=False):
+ ignore = support.ignore_deprecations_from
+ tokens = {
+ ignore("asyncio.base_events", like=r".*loop argument.*"),
+ ignore("asyncio.unix_events", like=r".*loop argument.*"),
+ ignore("asyncio.futures", like=r".*loop argument.*"),
+ ignore("asyncio.runners", like=r".*loop argument.*"),
+ ignore("asyncio.subprocess", like=r".*loop argument.*"),
+ ignore("asyncio.tasks", like=r".*loop argument.*"),
+ ignore("test.test_asyncio.test_events", like=r".*loop argument.*"),
+ ignore("test.test_asyncio.test_queues", like=r".*loop argument.*"),
+ ignore("test.test_asyncio.test_tasks", like=r".*loop argument.*"),
+ }
+ try:
+ super().run(result, debug=debug)
+ finally:
+ support.clear_ignored_deprecations(*tokens)
self.loop.set_exception_handler(self.loop_exception_handler)
self.__unhandled_exceptions = []
- # Disable `_get_running_loop`.
- self._old_get_running_loop = asyncio.events._get_running_loop
- asyncio.events._get_running_loop = lambda: None
-
def tearDown(self):
try:
self.loop.close()
self.fail('unexpected calls to loop.call_exception_handler()')
finally:
- asyncio.events._get_running_loop = self._old_get_running_loop
asyncio.set_event_loop(None)
self.loop = None
functools.partial(MySubprocessProtocol, self.loop),
'exit 7', stdin=None, stdout=None, stderr=None,
start_new_session=True)
- _, proto = yield self.loop.run_until_complete(connect)
+ transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.completed)
self.assertEqual(7, proto.returncode)
+ transp.close()
def test_subprocess_exec_invalid_args(self):
async def connect(**kwds):
return futures._PyFuture
+@unittest.skipUnless(hasattr(futures, '_CFuture'),
+ 'requires the C _asyncio module')
class CFutureInheritanceTests(BaseFutureInheritanceTests,
test_utils.TestCase):
def _get_future_cls(self):
# No garbage is left if SSL is closed uncleanly
client_context = weakref.ref(client_context)
+ support.gc_collect()
self.assertIsNone(client_context())
def test_create_connection_memory_leak(self):
# No garbage is left for SSL client from loop.create_connection, even
# if user stores the SSLTransport in corresponding protocol instance
client_context = weakref.ref(client_context)
+ support.gc_collect()
self.assertIsNone(client_context())
def test_start_tls_client_buf_proto_1(self):
# The 10s handshake timeout should be cancelled to free related
# objects without really waiting for 10s
client_sslctx = weakref.ref(client_sslctx)
+ support.gc_collect()
self.assertIsNone(client_sslctx())
def test_create_connection_ssl_slow_handshake(self):
self.new_task(self.loop, gen)
finally:
gen.close()
+ gc.collect() # For PyPy or other GCs.
self.assertTrue(m_log.error.called)
message = m_log.error.call_args[0][0]
def test_future_subclass(self):
self.assertTrue(issubclass(asyncio.Task, asyncio.Future))
+ @support.cpython_only
def test_asyncio_module_compiled(self):
# Because of circular imports it's easy to make _asyncio
# module non-importable. This is a simple test that will
# fail on systems where C modules were successfully compiled
- # (hence the test for _functools), but _asyncio somehow didn't.
+ # (hence the test for _functools etc), but _asyncio somehow didn't.
try:
import _functools
+ import _json
+ import _pickle
except ImportError:
- pass
+ self.skipTest('C modules are not available')
else:
try:
import _asyncio
def test_block_add_hook_baseexception(self):
self.do_test("test_block_add_hook_baseexception")
+ def test_marshal(self):
+ support.import_module("marshal")
+
+ self.do_test("test_marshal")
+
def test_pickle(self):
support.import_module("pickle")
self.assertRaises(TypeError, pynumber_tobase, '123', 10)
self.assertRaises(SystemError, pynumber_tobase, 123, 0)
+ def test_pyobject_repr_from_null(self):
+ s = _testcapi.pyobject_repr_from_null()
+ self.assertEqual(s, '<NULL>')
+
+ def test_pyobject_str_from_null(self):
+ s = _testcapi.pyobject_str_from_null()
+ self.assertEqual(s, '<NULL>')
+
+ def test_pyobject_bytes_from_null(self):
+ s = _testcapi.pyobject_bytes_from_null()
+ self.assertEqual(s, b'<NULL>')
+
class TestPendingCalls(unittest.TestCase):
except ImportError:
ctypes = None
from test.support import (run_doctest, run_unittest, cpython_only,
- check_impl_detail)
+ check_impl_detail, gc_collect)
def consts(t):
CodeType = type(co)
# test code constructor
- return CodeType(co.co_argcount,
+ CodeType(co.co_argcount,
co.co_posonlyargcount,
co.co_kwonlyargcount,
co.co_nlocals,
coderef = weakref.ref(f.__code__, callback)
self.assertTrue(bool(coderef()))
del f
+ gc_collect() # For PyPy or other GCs.
self.assertFalse(bool(coderef()))
self.assertTrue(self.called)
self.assertTrue(f1 != l1)
self.assertTrue(f1 != l2)
+ def test_Set_hash_matches_frozenset(self):
+ sets = [
+ {}, {1}, {None}, {-1}, {0.0}, {"abc"}, {1, 2, 3},
+ {10**100, 10**101}, {"a", "b", "ab", ""}, {False, True},
+ {object(), object(), object()}, {float("nan")}, {frozenset()},
+ {*range(1000)}, {*range(1000)} - {100, 200, 300},
+ {*range(sys.maxsize - 10, sys.maxsize + 10)},
+ ]
+ for s in sets:
+ fs = frozenset(s)
+ self.assertEqual(hash(fs), Set._hash(fs), msg=s)
+
def test_Mapping(self):
for sample in [dict]:
self.assertIsInstance(sample(), Mapping)
with open(self.bc_path, 'rb') as file:
data = file.read(12)
mtime = int(os.stat(self.source_path).st_mtime)
- compare = struct.pack('<4sll', importlib.util.MAGIC_NUMBER, 0, mtime)
+ compare = struct.pack('<4sLL', importlib.util.MAGIC_NUMBER, 0,
+ mtime & 0xFFFF_FFFF)
return data, compare
+ def test_year_2038_mtime_compilation(self):
+ # Test to make sure we can handle mtimes larger than what a 32-bit
+ # signed number can hold as part of bpo-34990
+ try:
+ os.utime(self.source_path, (2**32 - 1, 2**32 - 1))
+ except (OverflowError, OSError):
+ self.skipTest("filesystem doesn't support timestamps near 2**32")
+ self.assertTrue(compileall.compile_file(self.source_path))
+
+ def test_larger_than_32_bit_times(self):
+ # This is similar to the test above but we skip it if the OS doesn't
+ # support modification times larger than 32-bits.
+ try:
+ os.utime(self.source_path, (2**35, 2**35))
+ except (OverflowError, OSError):
+ self.skipTest("filesystem doesn't support large timestamps")
+ self.assertTrue(compileall.compile_file(self.source_path))
+
def recreation_check(self, metadata):
"""Check that compileall recreates bytecode when the new metadata is
used."""
def test_mtime(self):
# Test a change in mtime leads to a new .pyc.
- self.recreation_check(struct.pack('<4sll', importlib.util.MAGIC_NUMBER,
+ self.recreation_check(struct.pack('<4sLL', importlib.util.MAGIC_NUMBER,
0, 1))
def test_magic_number(self):
compileall.compile_file(data_file)
self.assertFalse(os.path.exists(os.path.join(data_dir, '__pycache__')))
+
+ def test_compile_file_encoding_fallback(self):
+ # Bug 44666 reported that compile_file failed when sys.stdout.encoding is None
+ self.add_bad_source_file()
+ with contextlib.redirect_stdout(io.StringIO()):
+ self.assertFalse(compileall.compile_file(self.bad_source_path))
+
+
def test_optimize(self):
# make sure compiling with different optimization settings than the
# interpreter's creates the correct file names
import unittest
+import sys
from test import support
from test.test_grammar import (VALID_UNDERSCORE_LITERALS,
INVALID_UNDERSCORE_LITERALS)
b = 5.1+2.3j
self.assertRaises(ValueError, pow, a, b, 0)
+ # Check some boundary conditions; some of these used to invoke
+ # undefined behaviour (https://bugs.python.org/issue44698). We're
+ # not actually checking the results of these operations, just making
+ # sure they don't crash (for example when using clang's
+ # UndefinedBehaviourSanitizer).
+ values = (sys.maxsize, sys.maxsize+1, sys.maxsize-1,
+ -sys.maxsize, -sys.maxsize+1, -sys.maxsize-1)
+ for real in values:
+ for imag in values:
+ with self.subTest(real=real, imag=imag):
+ c = complex(real, imag)
+ try:
+ c ** real
+ except OverflowError:
+ pass
+ try:
+ c ** c
+ except OverflowError:
+ pass
+
+ def test_pow_with_small_integer_exponents(self):
+ # Check that small integer exponents are handled identically
+ # regardless of their type.
+ values = [
+ complex(5.0, 12.0),
+ complex(5.0e100, 12.0e100),
+ complex(-4.0, INF),
+ complex(INF, 0.0),
+ ]
+ exponents = [-19, -5, -3, -2, -1, 0, 1, 2, 3, 5, 19]
+ for value in values:
+ for exponent in exponents:
+ with self.subTest(value=value, exponent=exponent):
+ try:
+ int_pow = value**exponent
+ except OverflowError:
+ int_pow = "overflow"
+ try:
+ float_pow = value**float(exponent)
+ except OverflowError:
+ float_pow = "overflow"
+ try:
+ complex_pow = value**complex(exponent)
+ except OverflowError:
+ complex_pow = "overflow"
+ self.assertEqual(str(float_pow), str(int_pow))
+ self.assertEqual(str(complex_pow), str(int_pow))
+
def test_boolcontext(self):
for i in range(100):
self.assertTrue(complex(random() + 1e-6, random() + 1e-6))
executor.map(abs, range(-5, 5))
threads = executor._threads
del executor
+ support.gc_collect() # For PyPy or other GCs.
for t in threads:
self.assertRegex(t.name, r'^SpecialPool_[0-4]$')
executor.map(abs, range(-5, 5))
threads = executor._threads
del executor
+ support.gc_collect() # For PyPy or other GCs.
for t in threads:
# Ensure that our default name is reasonably sane and unique when
call_queue = executor._call_queue
executor_manager_thread = executor._executor_manager_thread
del executor
+ support.gc_collect() # For PyPy or other GCs.
# Make sure that all the executor resources were properly cleaned by
# the shutdown process
futures_list.remove(future)
wr = weakref.ref(future)
del future
+ support.gc_collect() # For PyPy or other GCs.
self.assertIsNone(wr())
futures_list[0].set_result("test")
futures_list.remove(future)
wr = weakref.ref(future)
del future
+ support.gc_collect() # For PyPy or other GCs.
self.assertIsNone(wr())
if futures_list:
futures_list[0].set_result("test")
for obj in self.executor.map(make_dummy_object, range(10)):
wr = weakref.ref(obj)
del obj
+ support.gc_collect() # For PyPy or other GCs.
self.assertIsNone(wr())
self.assertEqual(state, [1, 42, 999])
def test_contextmanager_except_stopiter(self):
- stop_exc = StopIteration('spam')
@contextmanager
def woohoo():
yield
- try:
- with self.assertWarnsRegex(DeprecationWarning,
- "StopIteration"):
- with woohoo():
- raise stop_exc
- except Exception as ex:
- self.assertIs(ex, stop_exc)
- else:
- self.fail('StopIteration was suppressed')
+
+ class StopIterationSubclass(StopIteration):
+ pass
+
+ for stop_exc in (StopIteration('spam'), StopIterationSubclass('spam')):
+ with self.subTest(type=type(stop_exc)):
+ try:
+ with woohoo():
+ raise stop_exc
+ except Exception as ex:
+ self.assertIs(ex, stop_exc)
+ else:
+ self.fail(f'{stop_exc} was suppressed')
def test_contextmanager_except_pep479(self):
code = """\
async def woohoo():
yield
- for stop_exc in (StopIteration('spam'), StopAsyncIteration('ham')):
+ class StopIterationSubclass(StopIteration):
+ pass
+
+ class StopAsyncIterationSubclass(StopAsyncIteration):
+ pass
+
+ for stop_exc in (
+ StopIteration('spam'),
+ StopAsyncIteration('ham'),
+ StopIterationSubclass('spam'),
+ StopAsyncIterationSubclass('spam')
+ ):
with self.subTest(type=type(stop_exc)):
try:
async with woohoo():
from operator import le, lt, ge, gt, eq, ne
import unittest
+from test import support
order_comparisons = le, lt, ge, gt
equality_comparisons = eq, ne
self.assertEqual(v[c], d)
self.assertEqual(len(v), 2)
del c, d
+ support.gc_collect() # For PyPy or other GCs.
self.assertEqual(len(v), 1)
x, y = C(), C()
# The underlying containers are decoupled
self.assertEqual(v[a].i, b.i)
self.assertEqual(v[c].i, d.i)
del c
+ support.gc_collect() # For PyPy or other GCs.
self.assertEqual(len(v), 1)
def test_deepcopy_weakvaluedict(self):
self.assertIs(t, d)
del x, y, z, t
del d
+ support.gc_collect() # For PyPy or other GCs.
self.assertEqual(len(v), 1)
def test_deepcopy_bound_method(self):
class testUni(csv.excel):
delimiter = "\u039B"
+ class unspecified():
+ # A class to pass as dialect but with no dialect attributes.
+ pass
+
csv.register_dialect('testC', testC)
try:
self.compare_dialect_123("1,2,3\r\n")
+ self.compare_dialect_123("1,2,3\r\n", dialect=None)
+ self.compare_dialect_123("1,2,3\r\n", dialect=unspecified)
self.compare_dialect_123("1\t2\t3\r\n", testA)
self.compare_dialect_123("1:2:3\r\n", dialect=testB())
self.compare_dialect_123("1|2|3\r\n", dialect='testC')
from test.support import (run_unittest, run_doctest, is_resource_enabled,
requires_IEEE_754, requires_docstrings)
from test.support import (import_fresh_module, TestFailed,
- run_with_locale, cpython_only)
+ run_with_locale, cpython_only,
+ darwin_malloc_err_warning)
import random
import inspect
import threading
+if sys.platform == 'darwin':
+ darwin_malloc_err_warning('test_decimal')
+
+
C = import_fresh_module('decimal', fresh=['_decimal'])
P = import_fresh_module('decimal', blocked=['_decimal'])
orig_sys_decimal = sys.modules['decimal']
p = weakref.proxy(d)
self.assertEqual(str(p), str(d))
d = None
+ support.gc_collect() # For PyPy or other GCs.
self.assertRaises(ReferenceError, str, p)
def test_strange_subclass(self):
self.assertIn(
(b'<dl><dt><a name="-add"><strong>add</strong></a>(x, y)</dt><dd>'
b'<tt>Add two instances together. This '
- b'follows <a href="http://www.python.org/dev/peps/pep-0008/">'
+ b'follows <a href="https://www.python.org/dev/peps/pep-0008/">'
b'PEP008</a>, but has nothing<br>\nto do '
b'with <a href="http://www.rfc-editor.org/rfc/rfc1952.txt">'
b'RFC1952</a>. Case should matter: pEp008 '
foo
""").encode('ascii'))
+ def test_set_content_bytes_cte_7bit(self):
+ m = self._make_message()
+ m.set_content(b'ASCII-only message.\n',
+ maintype='application', subtype='octet-stream', cte='7bit')
+ self.assertEqual(str(m), textwrap.dedent("""\
+ Content-Type: application/octet-stream
+ Content-Transfer-Encoding: 7bit
+ MIME-Version: 1.0
+
+ ASCII-only message.
+ """))
+
content_object_params = {
'text_plain': ('content', ()),
'text_html': ('content', ('html',)),
def test_parsedate_returns_None_for_invalid_strings(self):
self.assertIsNone(utils.parsedate(''))
self.assertIsNone(utils.parsedate_tz(''))
+ self.assertIsNone(utils.parsedate(' '))
+ self.assertIsNone(utils.parsedate_tz(' '))
self.assertIsNone(utils.parsedate('0'))
self.assertIsNone(utils.parsedate_tz('0'))
self.assertIsNone(utils.parsedate('A Complete Waste of Time'))
addrs = utils.getaddresses(['User ((nested comment)) <foo@bar.com>'])
eq(addrs[0][1], 'foo@bar.com')
+ def test_getaddresses_header_obj(self):
+ """Test the handling of a Header object."""
+ addrs = utils.getaddresses([Header('Al Person <aperson@dom.ain>')])
+ self.assertEqual(addrs[0][1], 'aperson@dom.ain')
+
def test_make_msgid_collisions(self):
# Test make_msgid uniqueness, even with multiple threads
class MsgidsThread(Thread):
self.assertEqual(list(m.iter_attachments()), attachments)
def message_as_iter_parts(self, body_parts, attachments, parts, msg):
+ def _is_multipart_msg(msg):
+ return 'Content-Type: multipart' in msg
+
m = self._str_msg(msg)
allparts = list(m.walk())
parts = [allparts[n] for n in parts]
- self.assertEqual(list(m.iter_parts()), parts)
+ iter_parts = list(m.iter_parts()) if _is_multipart_msg(msg) else []
+ self.assertEqual(iter_parts, parts)
class _TestContentManager:
def get_content(self, msg, *args, **kw):
b'123456789-123456789\n 123456789 Hello '
b'=?utf-8?q?W=C3=B6rld!?= 123456789 123456789\n\n')
+ def test_get_body_malformed(self):
+ """test for bpo-42892"""
+ msg = textwrap.dedent("""\
+ Message-ID: <674392CA.4347091@email.au>
+ Date: Wed, 08 Nov 2017 08:50:22 +0700
+ From: Foo Bar <email@email.au>
+ MIME-Version: 1.0
+ To: email@email.com <email@email.com>
+ Subject: Python Email
+ Content-Type: multipart/mixed;
+ boundary="------------879045806563892972123996"
+ X-Global-filter:Messagescannedforspamandviruses:passedalltests
+
+ This is a multi-part message in MIME format.
+ --------------879045806563892972123996
+ Content-Type: text/plain; charset=ISO-8859-1; format=flowed
+ Content-Transfer-Encoding: 7bit
+
+ Your message is ready to be sent with the following file or link
+ attachments:
+ XU89 - 08.11.2017
+ """)
+ m = self._str_msg(msg)
+ # In bpo-42892, this would raise
+ # AttributeError: 'str' object has no attribute 'is_attachment'
+ m.get_body()
+
+
class TestMIMEPart(TestEmailMessageBase, TestEmailBase):
# Doing the full test run here may seem a bit redundant, since the two
# classes are almost identical. But what if they drift apart? So we do
raise Exception('Exception not raised.')
def test_missing_exceptions_reset(self):
+ import gc
import weakref
#
class TestEnum(enum.Enum):
class_2_ref = weakref.ref(Class2())
#
# The exception raised by Enum creates a reference loop and thus
- # Class2 instances will stick around until the next gargage collection
+ # Class2 instances will stick around until the next garbage collection
# cycle, unlike Class1.
+ gc.collect() # For PyPy or other GCs.
self.assertIs(class_1_ref(), None)
self.assertIs(class_2_ref(), None)
""", 9, 20)
check("pass\npass\npass\n(1+)\npass\npass\npass", 4, 4)
check("(1+)", 1, 4)
- check(b"\xef\xbb\xbf#coding: utf8\nprint('\xe6\x88\x91')\n", 0, -1)
+ check(b"\xef\xbb\xbf#coding: utf8\nprint('\xe6\x88\x91')\n", 0,
+ 0 if support.use_old_parser() else -1)
# Errors thrown by symtable.c
check('x = [(yield i) for i in range(3)]', 1, 5)
except MyException as e:
pass
obj = None
+ gc_collect() # For PyPy or other GCs.
obj = wr()
self.assertIsNone(obj)
except MyException:
pass
obj = None
+ gc_collect() # For PyPy or other GCs.
obj = wr()
self.assertIsNone(obj)
except:
pass
obj = None
+ gc_collect() # For PyPy or other GCs.
obj = wr()
self.assertIsNone(obj)
except:
break
obj = None
+ gc_collect() # For PyPy or other GCs.
obj = wr()
self.assertIsNone(obj)
# must clear the latter manually for our test to succeed.
e.__context__ = None
obj = None
+ gc_collect() # For PyPy or other GCs.
obj = wr()
# guarantee no ref cycles on CPython (don't gc_collect)
if check_impl_detail(cpython=False):
next(g)
testfunc(g)
g = obj = None
+ gc_collect() # For PyPy or other GCs.
obj = wr()
self.assertIsNone(obj)
raise Exception(MyObject())
except:
pass
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(e, (None, None, None))
+ def test_raise_does_not_create_context_chain_cycle(self):
+ class A(Exception):
+ pass
+ class B(Exception):
+ pass
+ class C(Exception):
+ pass
+
+ # Create a context chain:
+ # C -> B -> A
+ # Then raise A in context of C.
+ try:
+ try:
+ raise A
+ except A as a_:
+ a = a_
+ try:
+ raise B
+ except B as b_:
+ b = b_
+ try:
+ raise C
+ except C as c_:
+ c = c_
+ self.assertIsInstance(a, A)
+ self.assertIsInstance(b, B)
+ self.assertIsInstance(c, C)
+ self.assertIsNone(a.__context__)
+ self.assertIs(b.__context__, a)
+ self.assertIs(c.__context__, b)
+ raise a
+ except A as e:
+ exc = e
+
+ # Expect A -> C -> B, without cycle
+ self.assertIs(exc, a)
+ self.assertIs(a.__context__, c)
+ self.assertIs(c.__context__, b)
+ self.assertIsNone(b.__context__)
+
+ def test_no_hang_on_context_chain_cycle1(self):
+ # See issue 25782. Cycle in context chain.
+
+ def cycle():
+ try:
+ raise ValueError(1)
+ except ValueError as ex:
+ ex.__context__ = ex
+ raise TypeError(2)
+
+ try:
+ cycle()
+ except Exception as e:
+ exc = e
+
+ self.assertIsInstance(exc, TypeError)
+ self.assertIsInstance(exc.__context__, ValueError)
+ self.assertIs(exc.__context__.__context__, exc.__context__)
+
+ def test_no_hang_on_context_chain_cycle2(self):
+ # See issue 25782. Cycle at head of context chain.
+
+ class A(Exception):
+ pass
+ class B(Exception):
+ pass
+ class C(Exception):
+ pass
+
+ # Context cycle:
+ # +-----------+
+ # V |
+ # C --> B --> A
+ with self.assertRaises(C) as cm:
+ try:
+ raise A()
+ except A as _a:
+ a = _a
+ try:
+ raise B()
+ except B as _b:
+ b = _b
+ try:
+ raise C()
+ except C as _c:
+ c = _c
+ a.__context__ = c
+ raise c
+
+ self.assertIs(cm.exception, c)
+ # Verify the expected context chain cycle
+ self.assertIs(c.__context__, b)
+ self.assertIs(b.__context__, a)
+ self.assertIs(a.__context__, c)
+
+ def test_no_hang_on_context_chain_cycle3(self):
+ # See issue 25782. Longer context chain with cycle.
+
+ class A(Exception):
+ pass
+ class B(Exception):
+ pass
+ class C(Exception):
+ pass
+ class D(Exception):
+ pass
+ class E(Exception):
+ pass
+
+ # Context cycle:
+ # +-----------+
+ # V |
+ # E --> D --> C --> B --> A
+ with self.assertRaises(E) as cm:
+ try:
+ raise A()
+ except A as _a:
+ a = _a
+ try:
+ raise B()
+ except B as _b:
+ b = _b
+ try:
+ raise C()
+ except C as _c:
+ c = _c
+ a.__context__ = c
+ try:
+ raise D()
+ except D as _d:
+ d = _d
+ e = E()
+ raise e
+
+ self.assertIs(cm.exception, e)
+ # Verify the expected context chain cycle
+ self.assertIs(e.__context__, d)
+ self.assertIs(d.__context__, c)
+ self.assertIs(c.__context__, b)
+ self.assertIs(b.__context__, a)
+ self.assertIs(a.__context__, c)
+
def test_unicode_change_attributes(self):
# See issue 7309. This was a crasher.
self.assertIsInstance(v, RecursionError, type(v))
self.assertIn("maximum recursion depth exceeded", str(v))
+
+ @cpython_only
+ def test_trashcan_recursion(self):
+ # See bpo-33930
+
+ def foo():
+ o = object()
+ for x in range(1_000_000):
+ # Create a big chain of method objects that will trigger
+ # a deep chain of calls when they need to be destructed.
+ o = o.__dir__
+
+ foo()
+ support.gc_collect()
+
@cpython_only
def test_recursion_normalizing_exception(self):
# Issue #22898.
self.assertNotEqual(wr(), None)
else:
self.fail("MemoryError not raised")
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(wr(), None)
@no_tracing
self.assertNotEqual(wr(), None)
else:
self.fail("RecursionError not raised")
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(wr(), None)
def test_errno_ENOTDIR(self):
with support.catch_unraisable_exception() as cm:
del obj
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(cm.unraisable.object, BrokenDel.__del__)
self.assertIsNotNone(cm.unraisable.exc_traceback)
import io
import _pyio as pyio
-from test.support import TESTFN
+from test.support import TESTFN, gc_collect
from test import support
from collections import UserList
self.assertEqual(self.f.tell(), p.tell())
self.f.close()
self.f = None
+ gc_collect() # For PyPy or other GCs.
self.assertRaises(ReferenceError, getattr, p, 'tell')
def testAttributes(self):
f.close()
self.fail('%r is an invalid file mode' % mode)
+ def testStdin(self):
+ if sys.platform == 'osf1V5':
+ # This causes the interpreter to exit on OSF1 v5.1.
+ self.skipTest(
+ ' sys.stdin.seek(-1) may crash the interpreter on OSF1.'
+ ' Test manually.')
+
+ if not sys.stdin.isatty():
+ # Issue 14853: stdin becomes seekable when redirected to a file
+ self.skipTest('stdin must be a TTY in this test')
+
+ with self.assertRaises((IOError, ValueError)):
+ sys.stdin.seek(-1)
+ with self.assertRaises((IOError, ValueError)):
+ sys.stdin.truncate()
+
def testBadModeArgument(self):
# verify that we get a sensible error message for bad mode argument
bad_mode = "qwerty"
from functools import wraps
from test.support import (TESTFN, TESTFN_UNICODE, check_warnings, run_unittest,
- make_bad_fd, cpython_only, swap_attr)
+ make_bad_fd, cpython_only, swap_attr, gc_collect)
from collections import UserList
import _io # C implementation of io
self.assertEqual(self.f.tell(), p.tell())
self.f.close()
self.f = None
+ gc_collect() # For PyPy or other GCs.
self.assertRaises(ReferenceError, getattr, p, 'tell')
def testSeekTell(self):
self.identical(fromHex('0X1.0000000000001fp0'), 1.0+2*EPS)
self.identical(fromHex('0x1.00000000000020p0'), 1.0+2*EPS)
+ # Regression test for a corner-case bug reported in b.p.o. 44954
+ self.identical(fromHex('0x.8p-1074'), 0.0)
+ self.identical(fromHex('0x.80p-1074'), 0.0)
+ self.identical(fromHex('0x.81p-1074'), TINY)
+ self.identical(fromHex('0x8p-1078'), 0.0)
+ self.identical(fromHex('0x8.0p-1078'), 0.0)
+ self.identical(fromHex('0x8.1p-1078'), TINY)
+ self.identical(fromHex('0x80p-1082'), 0.0)
+ self.identical(fromHex('0x81p-1082'), TINY)
+ self.identical(fromHex('.8p-1074'), 0.0)
+ self.identical(fromHex('8p-1078'), 0.0)
+ self.identical(fromHex('-.8p-1074'), -0.0)
+ self.identical(fromHex('+8p-1078'), 0.0)
+
def test_roundtrip(self):
def roundtrip(x):
return fromHex(toHex(x))
self.assertEqual(call.lineno, 3)
self.assertEqual(call.col_offset, 11)
+ @unittest.skipIf(use_old_parser(), "The old parser gets the offsets incorrectly for fstrings")
def test_ast_line_numbers_duplicate_expression(self):
- """Duplicate expression
-
- NOTE: this is currently broken, always sets location of the first
- expression.
- """
expr = """
a = 10
f'{a * x()} {a * x()} {a * x()}'
self.assertEqual(binop.lineno, 3)
self.assertEqual(binop.left.lineno, 3)
self.assertEqual(binop.right.lineno, 3)
- self.assertEqual(binop.col_offset, 3) # FIXME: this is wrong
- self.assertEqual(binop.left.col_offset, 3) # FIXME: this is wrong
- self.assertEqual(binop.right.col_offset, 7) # FIXME: this is wrong
+ self.assertEqual(binop.col_offset, 13)
+ self.assertEqual(binop.left.col_offset, 13)
+ self.assertEqual(binop.right.col_offset, 17)
# check the third binop location
binop = t.body[1].value.values[4].value
self.assertEqual(type(binop), ast.BinOp)
self.assertEqual(binop.lineno, 3)
self.assertEqual(binop.left.lineno, 3)
self.assertEqual(binop.right.lineno, 3)
- self.assertEqual(binop.col_offset, 3) # FIXME: this is wrong
- self.assertEqual(binop.left.col_offset, 3) # FIXME: this is wrong
- self.assertEqual(binop.right.col_offset, 7) # FIXME: this is wrong
+ self.assertEqual(binop.col_offset, 23)
+ self.assertEqual(binop.left.col_offset, 23)
+ self.assertEqual(binop.right.col_offset, 27)
+
+ @unittest.skipIf(use_old_parser(), "The old parser gets the offsets incorrectly for fstrings")
+ def test_ast_numbers_fstring_with_formatting(self):
+
+ t = ast.parse('f"Here is that pesky {xxx:.3f} again"')
+ self.assertEqual(len(t.body), 1)
+ self.assertEqual(t.body[0].lineno, 1)
+
+ self.assertEqual(type(t.body[0]), ast.Expr)
+ self.assertEqual(type(t.body[0].value), ast.JoinedStr)
+ self.assertEqual(len(t.body[0].value.values), 3)
+
+ self.assertEqual(type(t.body[0].value.values[0]), ast.Constant)
+ self.assertEqual(type(t.body[0].value.values[1]), ast.FormattedValue)
+ self.assertEqual(type(t.body[0].value.values[2]), ast.Constant)
+
+ _, expr, _ = t.body[0].value.values
+
+ name = expr.value
+ self.assertEqual(type(name), ast.Name)
+ self.assertEqual(name.lineno, 1)
+ self.assertEqual(name.end_lineno, 1)
+ self.assertEqual(name.col_offset, 22)
+ self.assertEqual(name.end_col_offset, 25)
def test_ast_line_numbers_multiline_fstring(self):
# See bpo-30465 for details.
p = proxy(f)
self.assertEqual(f.func, p.func)
f = None
+ support.gc_collect() # For PyPy or other GCs.
self.assertRaises(ReferenceError, getattr, p, 'func')
def test_with_bound_and_unbound_methods(self):
method_copy = pickle.loads(pickle.dumps(method, proto))
self.assertIs(method_copy, method)
+
+ def test_total_ordering_for_metaclasses_issue_44605(self):
+
+ @functools.total_ordering
+ class SortableMeta(type):
+ def __new__(cls, name, bases, ns):
+ return super().__new__(cls, name, bases, ns)
+
+ def __lt__(self, other):
+ if not isinstance(other, SortableMeta):
+ pass
+ return self.__name__ < other.__name__
+
+ def __eq__(self, other):
+ if not isinstance(other, SortableMeta):
+ pass
+ return self.__name__ == other.__name__
+
+ class B(metaclass=SortableMeta):
+ pass
+
+ class A(metaclass=SortableMeta):
+ pass
+
+ self.assertTrue(A < B)
+ self.assertFalse(A > B)
+
+
@functools.total_ordering
class Orderable_LT:
def __init__(self, value):
# empty __dict__.
self.assertEqual(x, None)
+
+class PythonFinalizationTests(unittest.TestCase):
+ def test_ast_fini(self):
+ # bpo-44184: Regression test for subtype_dealloc() when deallocating
+ # an AST instance also destroys its AST type: subtype_dealloc() must
+ # not access the type memory after deallocating the instance, since
+ # the type memory can be freed as well. The test is also related to
+ # _PyAST_Fini() which clears references to AST types.
+ code = textwrap.dedent("""
+ import ast
+ import codecs
+
+ # Small AST tree to keep their AST types alive
+ tree = ast.parse("def f(x, y): return 2*x-y")
+ x = [tree]
+ x.append(x)
+
+ # Put the cycle somewhere to survive until the last GC collection.
+ # Codec search functions are only cleared at the end of
+ # interpreter_clear().
+ def search_func(encoding):
+ return None
+ search_func.a = x
+ codecs.register(search_func)
+ """)
+ assert_python_ok("-c", code)
+
+
def test_main():
enabled = gc.isenabled()
gc.disable()
try:
gc.collect() # Delete 2nd generation garbage
- run_unittest(GCTests, GCTogglingTests, GCCallbackTests)
+ run_unittest(
+ GCTests,
+ GCCallbackTests,
+ GCTogglingTests,
+ PythonFinalizationTests)
finally:
gc.set_debug(debug)
# test gc.enable() even if GC is disabled by default
self.assertEqual(next(g), "done")
self.assertEqual(sys.exc_info(), (None, None, None))
+ def test_except_throw_bad_exception(self):
+ class E(Exception):
+ def __new__(cls, *args, **kwargs):
+ return cls
+
+ def boring_generator():
+ yield
+
+ gen = boring_generator()
+
+ err_msg = 'should have returned an instance of BaseException'
+
+ with self.assertRaisesRegex(TypeError, err_msg):
+ gen.throw(E)
+
+ self.assertRaises(StopIteration, next, gen)
+
+ def generator():
+ with self.assertRaisesRegex(TypeError, err_msg):
+ yield
+
+ gen = generator()
+ next(gen)
+ with self.assertRaises(StopIteration):
+ gen.throw(E)
+
def test_stopiteration_error(self):
# See also PEP 479.
"""
coroutine_tests = """\
+>>> from test.support import gc_collect
+
Sending a value into a started generator:
>>> def f():
>>> g = f()
>>> next(g)
->>> del g
+>>> del g; gc_collect() # For PyPy or other GCs.
exiting
>>> g = f()
>>> next(g)
->>> del g
+>>> del g; gc_collect() # For PyPy or other GCs.
finally
#!%s
import os
-print("Content-type: text/plain")
+print("X-ambv: was here")
+print("Content-type: text/html")
print()
-print(repr(os.environ))
+print("<pre>")
+for k, v in os.environ.items():
+ try:
+ k.encode('ascii')
+ v.encode('ascii')
+ except UnicodeEncodeError:
+ continue # see: BPO-44647
+ print(f"{k}={v}")
+print("</pre>")
"""
with self.subTest(headers):
res = self.request('/cgi-bin/file6.py', 'GET', headers=headers)
self.assertEqual(http.HTTPStatus.OK, res.status)
- expected = f"'HTTP_ACCEPT': {expected!r}"
- self.assertIn(expected.encode('ascii'), res.read())
+ expected = f"HTTP_ACCEPT={expected}".encode('ascii')
+ self.assertIn(expected, res.read())
class SocketlessRequestHandler(SimpleHTTPRequestHandler):
('bar', 2, ..., "keyword_only")),
...))
+ def test_signature_on_subclass(self):
+ class A:
+ def __new__(cls, a=1, *args, **kwargs):
+ return object.__new__(cls)
+ class B(A):
+ def __init__(self, b):
+ pass
+ class C(A):
+ def __new__(cls, a=1, b=2, *args, **kwargs):
+ return object.__new__(cls)
+ class D(A):
+ pass
+
+ self.assertEqual(self.signature(B),
+ ((('b', ..., ..., "positional_or_keyword"),),
+ ...))
+ self.assertEqual(self.signature(C),
+ ((('a', 1, ..., 'positional_or_keyword'),
+ ('b', 2, ..., 'positional_or_keyword'),
+ ('args', ..., ..., 'var_positional'),
+ ('kwargs', ..., ..., 'var_keyword')),
+ ...))
+ self.assertEqual(self.signature(D),
+ ((('a', 1, ..., 'positional_or_keyword'),
+ ('args', ..., ..., 'var_positional'),
+ ('kwargs', ..., ..., 'var_keyword')),
+ ...))
+
+ def test_signature_on_generic_subclass(self):
+ from typing import Generic, TypeVar
+
+ T = TypeVar('T')
+
+ class A(Generic[T]):
+ def __init__(self, *, a: int) -> None:
+ pass
+
+ self.assertEqual(self.signature(A),
+ ((('a', ..., int, 'keyword_only'),),
+ None))
+
@unittest.skipIf(MISSING_C_DOCSTRINGS,
"Signature information for builtins requires docstrings")
def test_signature_on_class_without_init(self):
"""Check that a partial write, when it gets interrupted, properly
invokes the signal handler, and bubbles up the exception raised
in the latter."""
+
+ # XXX This test has three flaws that appear when objects are
+ # XXX not reference counted.
+
+ # - if wio.write() happens to trigger a garbage collection,
+ # the signal exception may be raised when some __del__
+ # method is running; it will not reach the assertRaises()
+ # call.
+
+ # - more subtle, if the wio object is not destroyed at once
+ # and survives this function, the next opened file is likely
+ # to have the same fileno (since the file descriptor was
+ # actively closed). When wio.__del__ is finally called, it
+ # will close the other's test file... To trigger this with
+ # CPython, try adding "global wio" in this function.
+
+ # - This happens only for streams created by the _pyio module,
+ # because a wio.close() that fails still consider that the
+ # file needs to be closed again. You can try adding an
+ # "assert wio.closed" at the end of the function.
+
+ # Fortunately, a little gc.collect() seems to be enough to
+ # work around all these issues.
+ support.gc_collect() # For PyPy or other GCs.
+
read_results = []
def _read():
s = os.read(r, 1)
p = weakref.proxy(a)
self.assertEqual(getattr(p, '__class__'), type(b))
del a
+ support.gc_collect() # For PyPy or other GCs.
self.assertRaises(ReferenceError, getattr, p, '__class__')
ans = list('abc')
self.assertRaises(ZeroDivisionError, operator.countOf, BadIterable(), 1)
self.assertEqual(operator.countOf([1, 2, 1, 3, 1, 4], 3), 1)
self.assertEqual(operator.countOf([1, 2, 1, 3, 1, 4], 5), 0)
+ # is but not ==
+ nan = float("nan")
+ self.assertEqual(operator.countOf([nan, nan, 21], nan), 2)
+ # == but not is
+ self.assertEqual(operator.countOf([{}, 1, {}, 2], {}), 2)
def test_delitem(self):
operator = self.module
self.assertRaises(ZeroDivisionError, operator.indexOf, BadIterable(), 1)
self.assertEqual(operator.indexOf([4, 3, 2, 1], 3), 1)
self.assertRaises(ValueError, operator.indexOf, [4, 3, 2, 1], 0)
+ nan = float("nan")
+ self.assertEqual(operator.indexOf([nan, nan, 21], nan), 0)
+ self.assertEqual(operator.indexOf([{}, 1, {}, 2], {}), 0)
def test_invert(self):
operator = self.module
self.assertIs(False, P('').is_reserved())
self.assertIs(False, P('/').is_reserved())
self.assertIs(False, P('/foo/bar').is_reserved())
+ # UNC paths are never reserved.
+ self.assertIs(False, P('//my/share/nul/con/aux').is_reserved())
+ # Case-insensitive DOS-device names are reserved.
+ self.assertIs(True, P('nul').is_reserved())
+ self.assertIs(True, P('aux').is_reserved())
+ self.assertIs(True, P('prn').is_reserved())
self.assertIs(True, P('con').is_reserved())
- self.assertIs(True, P('NUL').is_reserved())
+ self.assertIs(True, P('conin$').is_reserved())
+ self.assertIs(True, P('conout$').is_reserved())
+ # COM/LPT + 1-9 or + superscript 1-3 are reserved.
+ self.assertIs(True, P('COM1').is_reserved())
+ self.assertIs(True, P('LPT9').is_reserved())
+ self.assertIs(True, P('com\xb9').is_reserved())
+ self.assertIs(True, P('com\xb2').is_reserved())
+ self.assertIs(True, P('lpt\xb3').is_reserved())
+ # DOS-device name matching ignores characters after a dot or
+ # a colon and also ignores trailing spaces.
self.assertIs(True, P('NUL.txt').is_reserved())
- self.assertIs(True, P('com1').is_reserved())
- self.assertIs(True, P('com9.bar').is_reserved())
+ self.assertIs(True, P('PRN ').is_reserved())
+ self.assertIs(True, P('AUX .txt').is_reserved())
+ self.assertIs(True, P('COM1:bar').is_reserved())
+ self.assertIs(True, P('LPT9 :bar').is_reserved())
+ # DOS-device names are only matched at the beginning
+ # of a path component.
self.assertIs(False, P('bar.com9').is_reserved())
- self.assertIs(True, P('lpt1').is_reserved())
- self.assertIs(True, P('lpt9.bar').is_reserved())
self.assertIs(False, P('bar.lpt9').is_reserved())
- # Only the last component matters.
+ # Only the last path component matters.
+ self.assertIs(True, P('c:/baz/con/NUL').is_reserved())
self.assertIs(False, P('c:/NUL/con/baz').is_reserved())
- # UNC paths are never reserved.
- self.assertIs(False, P('//my/share/nul/con/aux').is_reserved())
class PurePathTest(_BasePurePathTest, unittest.TestCase):
cls = pathlib.PurePath
self.assertIn("ImportError: No module named t_main.__main__",
stdout.splitlines())
+ def test_package_without_a_main(self):
+ pkg_name = 't_pkg'
+ module_name = 't_main'
+ support.rmtree(pkg_name)
+ modpath = pkg_name + '/' + module_name
+ os.makedirs(modpath)
+ with open(modpath + '/__init__.py', 'w') as f:
+ pass
+ self.addCleanup(support.rmtree, pkg_name)
+ stdout, stderr = self._run_pdb(['-m', modpath.replace('/', '.')], "")
+ self.assertIn(
+ "'t_pkg.t_main' is a package and cannot be directly executed",
+ stdout)
+
def test_blocks_at_first_code_line(self):
script = """
#This is a comment, on line 2
'(Pdb) ',
])
+ def test_issue34266(self):
+ '''do_run handles exceptions from parsing its arg'''
+ def check(bad_arg, msg):
+ commands = "\n".join([
+ f'run {bad_arg}',
+ 'q',
+ ])
+ stdout, _ = self.run_pdb_script('pass', commands + '\n')
+ self.assertEqual(stdout.splitlines()[1:], [
+ '-> pass',
+ f'(Pdb) *** Cannot run {bad_arg}: {msg}',
+ '(Pdb) ',
+ ])
+ check('\\', 'No escaped character')
+ check('"', 'No closing quotation')
def test_issue42384(self):
'''When running `python foo.py` sys.path[0] is an absolute path. `python -m pdb foo.py` should behave the same'''
('f-string_doublestarred', "f'{ {**x} }'"),
('f-string_escape_brace', "f'{{Escape'"),
('f-string_escape_closing_brace', "f'Escape}}'"),
- ('f-string_repr', "f'{a!r}'"),
- ('f-string_str', "f'{a!s}'"),
- ('f-string_ascii', "f'{a!a}'"),
- ('f-string_debug', "f'{a=}'"),
- ('f-string_padding', "f'{a:03d}'"),
('f-string_multiline',
"""
f'''
import collections
import struct
import sys
+import warnings
import weakref
import unittest
return sys.modules[module]
except KeyError:
try:
- __import__(module)
+ with warnings.catch_warnings():
+ action = 'always' if support.verbose else 'ignore'
+ warnings.simplefilter(action, DeprecationWarning)
+ __import__(module)
except AttributeError as exc:
if support.verbose:
print("Can't import module %r: %s" % (module, exc))
import unittest
import weakref
from test import support
+from test.support import gc_collect
py_queue = support.import_fresh_module('queue', blocked=['_queue'])
c_queue = support.import_fresh_module('queue', fresh=['_queue'])
q.put(C())
for i in range(N):
wr = weakref.ref(q.get())
+ gc_collect() # For PyPy or other GCs.
self.assertIsNone(wr())
f()
def test_3611(self):
+ import gc
# A re-raised exception in a __del__ caused the __context__
# to be cleared
class C:
x = C()
try:
try:
- x.x
+ f.x
except AttributeError:
+ # make x.__del__ trigger
del x
+ gc.collect() # For PyPy or other GCs.
raise TypeError
except Exception as e:
self.assertNotEqual(e.__context__, None)
list(r))
def test_iterator_pickling(self):
- testcases = [(13,), (0, 11), (-22, 10), (20, 3, -1),
- (13, 21, 3), (-2, 2, 2), (2**65, 2**65+2)]
+ testcases = [(13,), (0, 11), (-22, 10), (20, 3, -1), (13, 21, 3),
+ (-2, 2, 2), (2**31-3, 2**31-1), (2**33, 2**33+2),
+ (2**63-3, 2**63-1), (2**65, 2**65+2)]
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
for t in testcases:
- it = itorg = iter(range(*t))
- data = list(range(*t))
-
- d = pickle.dumps(it, proto)
- it = pickle.loads(d)
- self.assertEqual(type(itorg), type(it))
- self.assertEqual(list(it), data)
-
- it = pickle.loads(d)
- try:
- next(it)
- except StopIteration:
- continue
+ with self.subTest(proto=proto, t=t):
+ it = itorg = iter(range(*t))
+ data = list(range(*t))
+
+ d = pickle.dumps(it, proto)
+ it = pickle.loads(d)
+ self.assertEqual(type(itorg), type(it))
+ self.assertEqual(list(it), data)
+
+ it = pickle.loads(d)
+ try:
+ next(it)
+ except StopIteration:
+ continue
+ d = pickle.dumps(it, proto)
+ it = pickle.loads(d)
+ self.assertEqual(list(it), data[1:])
+
+ def test_iterator_pickling_overflowing_index(self):
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(proto=proto):
+ it = iter(range(2**32 + 2))
+ _, _, idx = it.__reduce__()
+ self.assertEqual(idx, 0)
+ it.__setstate__(2**32 + 1) # undocumented way to set r->index
+ _, _, idx = it.__reduce__()
+ self.assertEqual(idx, 2**32 + 1)
d = pickle.dumps(it, proto)
it = pickle.loads(d)
- self.assertEqual(list(it), data[1:])
+ self.assertEqual(next(it), 2**32 + 1)
def test_exhausted_iterator_pickling(self):
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
def test_auto_history_enabled(self):
output = run_pty(self.auto_history_script.format(True))
- self.assertIn(b"History length: 1\r\n", output)
+ # bpo-44949: Sometimes, the newline character is not written at the
+ # end, so don't expect it in the output.
+ self.assertIn(b"History length: 1", output)
def test_auto_history_disabled(self):
output = run_pty(self.auto_history_script.format(False))
- self.assertIn(b"History length: 0\r\n", output)
+ # bpo-44949: Sometimes, the newline character is not written at the
+ # end, so don't expect it in the output.
+ self.assertIn(b"History length: 0", output)
def test_nonascii(self):
loc = locale.setlocale(locale.LC_CTYPE, None)
import sysconfig
import tempfile
import textwrap
+import time
import unittest
from test import libregrtest
from test import support
def check_executed_tests(self, output, tests, skipped=(), failed=(),
env_changed=(), omitted=(),
- rerun=(), no_test_ran=(),
+ rerun={}, no_test_ran=(),
randomize=False, interrupted=False,
fail_env_changed=False):
if isinstance(tests, str):
env_changed = [env_changed]
if isinstance(omitted, str):
omitted = [omitted]
- if isinstance(rerun, str):
- rerun = [rerun]
if isinstance(no_test_ran, str):
no_test_ran = [no_test_ran]
self.check_line(output, regex)
if rerun:
- regex = list_regex('%s re-run test%s', rerun)
+ regex = list_regex('%s re-run test%s', rerun.keys())
self.check_line(output, regex)
regex = LOG_PREFIX + r"Re-running failed tests in verbose mode"
self.check_line(output, regex)
- for test_name in rerun:
- regex = LOG_PREFIX + f"Re-running {test_name} in verbose mode"
+ for name, match in rerun.items():
+ regex = LOG_PREFIX + f"Re-running {name} in verbose mode \\(matching: {match}\\)"
self.check_line(output, regex)
if no_test_ran:
class CheckActualTests(BaseTestCase):
- """
- Check that regrtest appears to find the expected set of tests.
- """
-
def test_finds_expected_number_of_tests(self):
+ """
+ Check that regrtest appears to find the expected set of tests.
+ """
args = ['-Wd', '-E', '-bb', '-m', 'test.regrtest', '--list-tests']
output = self.run_python(args)
rough_number_of_tests_found = len(output.splitlines())
import unittest
class Tests(unittest.TestCase):
- def test_bug(self):
- # test always fail
+ def test_succeed(self):
+ return
+
+ def test_fail_always(self):
+ # test that always fails
self.fail("bug")
""")
testname = self.create_test(code=code)
output = self.run_tests("-w", testname, exitcode=2)
self.check_executed_tests(output, [testname],
- failed=testname, rerun=testname)
+ failed=testname, rerun={testname: "test_fail_always"})
def test_rerun_success(self):
# FAILURE then SUCCESS
import unittest
class Tests(unittest.TestCase):
- failed = False
+ def test_succeed(self):
+ return
def test_fail_once(self):
if not hasattr(builtins, '_test_failed'):
output = self.run_tests("-w", testname, exitcode=0)
self.check_executed_tests(output, [testname],
- rerun=testname)
+ rerun={testname: "test_fail_once"})
def test_no_tests_ran(self):
code = textwrap.dedent("""
if x.startswith('s')])
def test_excessive_getattr(self):
- # Ensure getattr() is invoked no more than once per attribute
+ """Ensure getattr() is invoked no more than once per attribute"""
+
+ # note the special case for @property methods below; that is why
+ # we use __dir__ and __getattr__ in class Foo to create a "magic"
+ # class attribute 'bar'. This forces `getattr` to call __getattr__
+ # (which it doesn't necessarily do).
class Foo:
calls = 0
+ bar = ''
+ def __getattribute__(self, name):
+ if name == 'bar':
+ self.calls += 1
+ return None
+ return super().__getattribute__(name)
+
+ f = Foo()
+ completer = rlcompleter.Completer(dict(f=f))
+ self.assertEqual(completer.complete('f.b', 0), 'f.bar')
+ self.assertEqual(f.calls, 1)
+
+ def test_property_method_not_called(self):
+ class Foo:
+ _bar = 0
+ property_called = False
+
@property
def bar(self):
- self.calls += 1
- return None
+ self.property_called = True
+ return self._bar
+
f = Foo()
completer = rlcompleter.Completer(dict(f=f))
self.assertEqual(completer.complete('f.b', 0), 'f.bar')
- self.assertEqual(f.calls, 1)
+ self.assertFalse(f.property_called)
+
def test_uncreated_attr(self):
# Attributes like properties and slots should be completed even when
import weakref
from test.support import check_syntax_error, cpython_only
+from test.support import gc_collect
class ScopeTests(unittest.TestCase):
for i in range(100):
f1()
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(Foo.count, 0)
def testClassAndGlobal(self):
tester.dig()
ref = weakref.ref(tester)
del tester
+ gc_collect() # For PyPy or other GCs.
self.assertIsNone(ref())
p = weakref.proxy(s)
self.assertEqual(str(p), str(s))
s = None
+ support.gc_collect() # For PyPy or other GCs.
self.assertRaises(ReferenceError, str, p)
def test_rich_compare(self):
TESTFN_SRC = TESTFN + "_SRC"
TESTFN_DST = TESTFN + "_DST"
MACOS = sys.platform.startswith("darwin")
+SOLARIS = sys.platform.startswith("sunos")
AIX = sys.platform[:3] == 'aix'
try:
import grp
# Make sure file is not corrupted.
self.assertEqual(read_file(src_file), 'foo')
+ @unittest.skipIf(MACOS or SOLARIS or _winapi, 'On MACOS, Solaris and Windows the errors are not confusing (though different)')
+ def test_copyfile_nonexistent_dir(self):
+ # Issue 43219
+ src_dir = self.mkdtemp()
+ src_file = os.path.join(src_dir, 'foo')
+ dst = os.path.join(src_dir, 'does_not_exist/')
+ write_file(src_file, 'foo')
+ self.assertRaises(FileNotFoundError, shutil.copyfile, src_file, dst)
+
class TestArchives(BaseTest, unittest.TestCase):
self.assertEqual(smtp.getreply(), expected)
smtp.quit()
+ def test_issue43124_putcmd_escapes_newline(self):
+ # see: https://bugs.python.org/issue43124
+ smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost',
+ timeout=support.LOOPBACK_TIMEOUT)
+ self.addCleanup(smtp.close)
+ with self.assertRaises(ValueError) as exc:
+ smtp.putcmd('helo\nX-INJECTED')
+ self.assertIn("prohibited newline characters", str(exc.exception))
+ smtp.quit()
+
def testVRFY(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost',
timeout=support.LOOPBACK_TIMEOUT)
mexpect = '%s%s\n%s' % (MSG_BEGIN, m, MSG_END)
self.assertEqual(self.output.getvalue(), mexpect)
+ def test_issue43124_escape_localhostname(self):
+ # see: https://bugs.python.org/issue43124
+ # connect and send mail
+ m = 'wazzuuup\nlinetwo'
+ smtp = smtplib.SMTP(HOST, self.port, local_hostname='hi\nX-INJECTED',
+ timeout=support.LOOPBACK_TIMEOUT)
+ self.addCleanup(smtp.close)
+ with self.assertRaises(ValueError) as exc:
+ smtp.sendmail("hi@me.com", "you@me.com", m)
+ self.assertIn(
+ "prohibited newline characters: ehlo hi\\nX-INJECTED",
+ str(exc.exception),
+ )
+ # XXX (see comment in testSend)
+ time.sleep(0.01)
+ smtp.quit()
+
+ debugout = smtpd.DEBUGSTREAM.getvalue()
+ self.assertNotIn("X-INJECTED", debugout)
+
+ def test_issue43124_escape_options(self):
+ # see: https://bugs.python.org/issue43124
+ # connect and send mail
+ m = 'wazzuuup\nlinetwo'
+ smtp = smtplib.SMTP(
+ HOST, self.port, local_hostname='localhost',
+ timeout=support.LOOPBACK_TIMEOUT)
+
+ self.addCleanup(smtp.close)
+ smtp.sendmail("hi@me.com", "you@me.com", m)
+ with self.assertRaises(ValueError) as exc:
+ smtp.mail("hi@me.com", ["X-OPTION\nX-INJECTED-1", "X-OPTION2\nX-INJECTED-2"])
+ msg = str(exc.exception)
+ self.assertIn("prohibited newline characters", msg)
+ self.assertIn("X-OPTION\\nX-INJECTED-1 X-OPTION2\\nX-INJECTED-2", msg)
+ # XXX (see comment in testSend)
+ time.sleep(0.01)
+ smtp.quit()
+
+ debugout = smtpd.DEBUGSTREAM.getvalue()
+ self.assertNotIn("X-OPTION", debugout)
+ self.assertNotIn("X-OPTION2", debugout)
+ self.assertNotIn("X-INJECTED-1", debugout)
+ self.assertNotIn("X-INJECTED-2", debugout)
+
def testSendNullSender(self):
m = 'A test message'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost',
p = proxy(s)
self.assertEqual(p.fileno(), s.fileno())
s = None
+ support.gc_collect() # For PyPy or other GCs.
try:
p.fileno()
except ReferenceError:
pid = p.pid
with support.check_warnings(('', ResourceWarning)):
p = None
+ support.gc_collect() # For PyPy or other GCs.
os.kill(pid, signal.SIGKILL)
if mswindows:
import textwrap
import time
import unittest
+import warnings
+
from test import support
from test.support import script_helper
from test.support import socket_helper
class TestSupport(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ orig_filter_len = len(warnings.filters)
+ cls._warnings_helper_token = support.ignore_deprecations_from(
+ "test.test_support", like=".*used in test_support.*"
+ )
+ cls._test_support_token = support.ignore_deprecations_from(
+ "test.test_support", like=".*You should NOT be seeing this.*"
+ )
+ assert len(warnings.filters) == orig_filter_len + 2
+
+ @classmethod
+ def tearDownClass(cls):
+ orig_filter_len = len(warnings.filters)
+ support.clear_ignored_deprecations(
+ cls._warnings_helper_token,
+ cls._test_support_token,
+ )
+ assert len(warnings.filters) == orig_filter_len - 2
+
+ def test_ignored_deprecations_are_silent(self):
+ """Test support.ignore_deprecations_from() silences warnings"""
+ with warnings.catch_warnings(record=True) as warning_objs:
+ _warn_about_deprecation()
+ warnings.warn("You should NOT be seeing this.", DeprecationWarning)
+ messages = [str(w.message) for w in warning_objs]
+ self.assertEqual(len(messages), 0, messages)
def test_import_module(self):
support.import_module("ftplib")
# SuppressCrashReport
+def _warn_about_deprecation():
+ # In 3.10+ this lives in test.support.warnings_helper
+ warnings.warn(
+ "This is used in test_support test to ensure"
+ " support.ignore_deprecations_from() works as expected."
+ " You should not be seeing this.",
+ DeprecationWarning,
+ stacklevel=0,
+ )
+
+
def test_main():
tests = [TestSupport]
support.run_unittest(*tests)
>>> f((x)=2)
Traceback (most recent call last):
SyntaxError: expression cannot contain assignment, perhaps you meant "=="?
->>> f(True=2)
-Traceback (most recent call last):
-SyntaxError: expression cannot contain assignment, perhaps you meant "=="?
>>> f(__debug__=1)
Traceback (most recent call last):
SyntaxError: cannot assign to __debug__
...
SyntaxError: 'break' outside loop
-This raises a SyntaxError, it used to raise a SystemError.
-Context for this change can be found on issue #27514
-
-In 2.5 there was a missing exception and an assert was triggered in a debug
-build. The number of blocks must be greater than CO_MAXBLOCKS. SF #1565514
-
- >>> while 1:
- ... while 2:
- ... while 3:
- ... while 4:
- ... while 5:
- ... while 6:
- ... while 8:
- ... while 9:
- ... while 10:
- ... while 11:
- ... while 12:
- ... while 13:
- ... while 14:
- ... while 15:
- ... while 16:
- ... while 17:
- ... while 18:
- ... while 19:
- ... while 20:
- ... while 21:
- ... while 22:
- ... break
- Traceback (most recent call last):
- ...
- SyntaxError: too many statically nested blocks
-
Misuse of the nonlocal and global statement can lead to a few unique syntax errors.
>>> def f():
Traceback (most recent call last):
SyntaxError: trailing comma not allowed without surrounding parentheses
+# Check that we don't raise the "trailing comma" error if there is more
+# input to the left of the valid part that we parsed.
+
+>>> from t import x,y, and 3
+Traceback (most recent call last):
+SyntaxError: invalid syntax
+
>>> (): int
Traceback (most recent call last):
SyntaxError: only single target (not tuple) can be annotated
def test_invalid_line_continuation_error_position(self):
self._check_error(r"a = 3 \ 4",
"unexpected character after line continuation character",
- lineno=1, offset=9)
+ lineno=1, offset=(10 if support.use_old_parser() else 9))
def test_invalid_line_continuation_left_recursive(self):
# Check bpo-42218: SyntaxErrors following left-recursive rules
self._check_error("A.\u03bc\\\n",
"unexpected EOF while parsing")
+ @support.cpython_only
+ def test_syntax_error_on_deeply_nested_blocks(self):
+ # This raises a SyntaxError, it used to raise a SystemError. Context
+ # for this change can be found on issue #27514
+
+ # In 2.5 there was a missing exception and an assert was triggered in a
+ # debug build. The number of blocks must be greater than CO_MAXBLOCKS.
+ # SF #1565514
+
+ source = """
+while 1:
+ while 2:
+ while 3:
+ while 4:
+ while 5:
+ while 6:
+ while 8:
+ while 9:
+ while 10:
+ while 11:
+ while 12:
+ while 13:
+ while 14:
+ while 15:
+ while 16:
+ while 17:
+ while 18:
+ while 19:
+ while 20:
+ while 21:
+ while 22:
+ break
+"""
+ self._check_error(source, "too many statically nested blocks")
+
+
def test_main():
support.run_unittest(SyntaxTestCase)
from test import test_syntax
self.compare_events(doit_async.__code__.co_firstlineno,
tracer.events, events)
+ def test_21_async_for_else(self):
+
+ async def async_gen():
+ yield -2
+
+ async def async_test():
+ global a
+ a = 2
+ async for i in async_gen():
+ a = 4
+ else:
+ a = 6
+
+ def run(tracer):
+ x = async_test()
+ try:
+ sys.settrace(tracer)
+ x.send(None)
+ finally:
+ sys.settrace(None)
+
+ tracer = self.make_tracer()
+ events = [
+ (0, 'call'),
+ (2, 'line'),
+ (3, 'line'),
+ (-3, 'call'),
+ (-2, 'line'),
+ (-2, 'return'),
+ (3, 'exception'),
+ (4, 'line'),
+ (3, 'line'),
+ (-2, 'call'),
+ (-2, 'return'),
+ (3, 'exception'),
+ (6, 'line'),
+ (6, 'return')]
+ try:
+ run(tracer.trace)
+ except Exception:
+ pass
+ self.compare_events(async_test.__code__.co_firstlineno,
+ tracer.events, events)
+
class SkipLineEventsTraceTestCase(TraceTestCase):
"""Repeat the trace tests, but with per-line events skipped"""
class GzipCreateTest(GzipTest, CreateTest):
- pass
+
+ def test_create_with_compresslevel(self):
+ with tarfile.open(tmpname, self.mode, compresslevel=1) as tobj:
+ tobj.add(self.file_path)
+ with tarfile.open(tmpname, 'r:gz', compresslevel=1) as tobj:
+ pass
class Bz2CreateTest(Bz2Test, CreateTest):
- pass
+
+ def test_create_with_compresslevel(self):
+ with tarfile.open(tmpname, self.mode, compresslevel=1) as tobj:
+ tobj.add(self.file_path)
+ with tarfile.open(tmpname, 'r:bz2', compresslevel=1) as tobj:
+ pass
class LzmaCreateTest(LzmaTest, CreateTest):
- pass
+
+ # Unlike gz and bz2, xz uses the preset keyword instead of compresslevel.
+ # It does not allow for preset to be specified when reading.
+ def test_create_with_preset(self):
+ with tarfile.open(tmpname, self.mode, preset=1) as tobj:
+ tobj.add(self.file_path)
class CreateWithXModeTest(CreateTest):
class TkinterTest(unittest.TestCase):
def testFlattenLen(self):
- # flatten(<object with no length>)
+ # Object without length.
self.assertRaises(TypeError, _tkinter._flatten, True)
+ # Object with length, but not sequence.
+ self.assertRaises(TypeError, _tkinter._flatten, {})
+ # Sequence or set, but not tuple or list.
+ # (issue44608: there were leaks in the following cases)
+ self.assertRaises(TypeError, _tkinter._flatten, 'string')
+ self.assertRaises(TypeError, _tkinter._flatten, {'set'})
class TclTest(unittest.TestCase):
self.do_create(dir=dir).write(b"blat")
self.do_create(dir=pathlib.Path(dir)).write(b"blat")
finally:
+ support.gc_collect() # For PyPy or other GCs.
os.rmdir(dir)
def test_file_mode(self):
extant = list(range(TEST_FILES))
for i in extant:
extant[i] = self.do_create(pre="aa")
+ del extant
+ support.gc_collect() # For PyPy or other GCs.
## def test_warning(self):
## # mktemp issues a warning when used
del task
while not done:
time.sleep(POLL_SLEEP)
+ support.gc_collect() # For PyPy or other GCs.
self.assertEqual(thread._count(), orig)
def test_unraisable_exception(self):
t.join()
del t
- gc.collect()
+ support.gc_collect() # For PyPy or other GCs.
self.assertEqual(len(weaklist), n)
# XXX _threading_local keeps the local of the last stopped thread alive.
# Assignment to the same thread local frees it sometimes (!)
local.someothervar = None
- gc.collect()
+ support.gc_collect() # For PyPy or other GCs.
deadlist = [weak for weak in weaklist if weak() is None]
self.assertIn(len(deadlist), (n-1, n), (n, len(deadlist)))
# 2) GC the cycle (triggers threadmodule.c::local_clear
# before local_dealloc)
del cycle
- gc.collect()
+ support.gc_collect() # For PyPy or other GCs.
e1.set()
e2.wait()
x.local.x = x
wr = weakref.ref(x)
del x
- gc.collect()
+ support.gc_collect() # For PyPy or other GCs.
self.assertIsNone(wr())
# See http://bugs.python.org/issue16152
self.assertExactTypeEqual('@ ', token.AT)
+ def test_comment_at_the_end_of_the_source_without_newline(self):
+ # See http://bugs.python.org/issue44667
+ source = 'b = 1\n\n#test'
+ expected_tokens = [token.NAME, token.EQUAL, token.NUMBER, token.NEWLINE, token.NL, token.COMMENT]
+
+ tokens = list(tokenize(BytesIO(source.encode('utf-8')).readline))
+ self.assertEqual(tok_name[tokens[0].exact_type], tok_name[ENCODING])
+ for i in range(6):
+ self.assertEqual(tok_name[tokens[i + 1].exact_type], tok_name[expected_tokens[i]])
+ self.assertEqual(tok_name[tokens[-1].exact_type], tok_name[token.ENDMARKER])
class UntokenizeTest(TestCase):
lst = traceback.format_exception_only(e.__class__, e)
self.assertEqual(lst, ['KeyboardInterrupt\n'])
+ def test_traceback_context_recursionerror(self):
+ # Test that for long traceback chains traceback does not itself
+ # raise a recursion error while printing (Issue43048)
+
+ # Calling f() creates a stack-overflowing __context__ chain.
+ def f():
+ try:
+ raise ValueError('hello')
+ except ValueError:
+ f()
+
+ try:
+ f()
+ except RecursionError:
+ exc_info = sys.exc_info()
+
+ traceback.format_exception(exc_info[0], exc_info[1], exc_info[2])
+
+ def test_traceback_cause_recursionerror(self):
+ # Same as test_traceback_context_recursionerror, but with
+ # a __cause__ chain.
+
+ def f():
+ e = None
+ try:
+ f()
+ except Exception as exc:
+ e = exc
+ raise Exception from e
+
+ try:
+ f()
+ except Exception:
+ exc_info = sys.exc_info()
+
+ traceback.format_exception(exc_info[0], exc_info[1], exc_info[2])
+
def test_format_exception_only_bad__str__(self):
class X(Exception):
def __str__(self):
self.assertVectorsAlmostEqual(-vec, expected)
def test_distance(self):
- vec = Vec2D(6, 8)
- expected = 10
- self.assertEqual(abs(vec), expected)
-
- vec = Vec2D(0, 0)
- expected = 0
- self.assertEqual(abs(vec), expected)
-
- vec = Vec2D(2.5, 6)
- expected = 6.5
- self.assertEqual(abs(vec), expected)
+ self.assertEqual(abs(Vec2D(6, 8)), 10)
+ self.assertEqual(abs(Vec2D(0, 0)), 0)
+ self.assertAlmostEqual(abs(Vec2D(2.5, 6)), 6.5)
def test_rotate(self):
import types
from test import mod_generics_cache
+from test import _typed_dict_helper
class BaseTestCase(TestCase):
class C(P): pass
self.assertIsInstance(C(), C)
+ with self.assertRaises(TypeError):
+ C(42)
+
T = TypeVar('T')
class PG(Protocol[T]): pass
class CG(PG[T]): pass
self.assertIsInstance(CG[int](), CG)
+ with self.assertRaises(TypeError):
+ CG[int](42)
def test_cannot_instantiate_abstract(self):
@runtime_checkable
self.assertEqual(C[int]().test, 'OK')
+ class B:
+ def __init__(self):
+ self.test = 'OK'
+
+ class D1(B, P[T]):
+ pass
+
+ self.assertEqual(D1[int]().test, 'OK')
+
+ class D2(P[T], B):
+ pass
+
+ self.assertEqual(D2[int]().test, 'OK')
+
+ def test_new_called(self):
+ T = TypeVar('T')
+
+ class P(Protocol[T]): pass
+
+ class C(P[T]):
+ def __new__(cls, *args):
+ self = super().__new__(cls, *args)
+ self.test = 'OK'
+ return self
+
+ self.assertEqual(C[int]().test, 'OK')
+ with self.assertRaises(TypeError):
+ C[int](42)
+ with self.assertRaises(TypeError):
+ C[int](a=42)
+
def test_protocols_bad_subscripts(self):
T = TypeVar('T')
S = TypeVar('S')
x: int
y: int
+class Bar(_typed_dict_helper.Foo, total=False):
+ b: int
+
class LabelPoint2D(Point2D, Label): ...
class Options(TypedDict, total=False):
'voice': str,
}
+ def test_get_type_hints(self):
+ self.assertEqual(
+ get_type_hints(Bar),
+ {'a': typing.Optional[int], 'b': int}
+ )
+
class IOTests(BaseTestCase):
from test import support
from test.support import script_helper, ALWAYS_EQ
+from test.support import gc_collect
# Used in ReferencesTestCase.test_ref_created_during_del() .
ref_from_del = None
ref1 = weakref.ref(o, self.callback)
ref2 = weakref.ref(o, self.callback)
del o
+ gc_collect() # For PyPy or other GCs.
self.assertIsNone(ref1(), "expected reference to be invalidated")
self.assertIsNone(ref2(), "expected reference to be invalidated")
self.assertEqual(self.cbcalled, 2,
ref1 = weakref.proxy(o, self.callback)
ref2 = weakref.proxy(o, self.callback)
del o
+ gc_collect() # For PyPy or other GCs.
def check(proxy):
proxy.bar
self.assertRaises(ReferenceError, check, ref1)
self.assertRaises(ReferenceError, check, ref2)
- self.assertRaises(ReferenceError, bool, weakref.proxy(C()))
+ ref3 = weakref.proxy(C())
+ gc_collect() # For PyPy or other GCs.
+ self.assertRaises(ReferenceError, bool, ref3)
self.assertEqual(self.cbcalled, 2)
def check_basic_ref(self, factory):
o = factory()
ref = weakref.ref(o, self.callback)
del o
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(self.cbcalled, 1,
"callback did not properly set 'cbcalled'")
self.assertIsNone(ref(),
self.assertEqual(weakref.getweakrefcount(o), 2,
"wrong weak ref count for object")
del proxy
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(weakref.getweakrefcount(o), 1,
"wrong weak ref count for object after deleting proxy")
# can be killed in the middle of the call
"blech" in p
+ def test_proxy_next(self):
+ arr = [4, 5, 6]
+ def iterator_func():
+ yield from arr
+ it = iterator_func()
+
+ class IteratesWeakly:
+ def __iter__(self):
+ return weakref.proxy(it)
+
+ weak_it = IteratesWeakly()
+
+ # Calls proxy.__next__
+ self.assertEqual(list(weak_it), [4, 5, 6])
+
+ def test_proxy_bad_next(self):
+ # bpo-44720: PyIter_Next() shouldn't be called if the reference
+ # isn't an iterator.
+
+ not_an_iterator = lambda: 0
+
+ class A:
+ def __iter__(self):
+ return weakref.proxy(not_an_iterator)
+ a = A()
+
+ msg = "Weakref proxy referenced a non-iterator"
+ with self.assertRaisesRegex(TypeError, msg):
+ list(a)
+
def test_proxy_reversed(self):
class MyObj:
def __len__(self):
self.assertEqual("".join(reversed(weakref.proxy(obj))), "cba")
def test_proxy_hash(self):
- cool_hash = 299_792_458
-
class MyObj:
def __hash__(self):
- return cool_hash
+ return 42
+
+ obj = MyObj()
+ with self.assertRaises(TypeError):
+ hash(weakref.proxy(obj))
+
+ class MyObj:
+ __hash__ = None
obj = MyObj()
- self.assertEqual(hash(weakref.proxy(obj)), cool_hash)
+ with self.assertRaises(TypeError):
+ hash(weakref.proxy(obj))
def test_getweakrefcount(self):
o = C()
"got wrong number of weak reference objects")
del ref1, ref2, proxy1, proxy2
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(weakref.getweakrefcount(o), 0,
"weak reference objects not unlinked from"
" referent when discarded.")
ref1 = weakref.ref(o, self.callback)
ref2 = weakref.ref(o, self.callback)
del ref1
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(weakref.getweakrefs(o), [ref2],
"list of refs does not match")
ref1 = weakref.ref(o, self.callback)
ref2 = weakref.ref(o, self.callback)
del ref2
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(weakref.getweakrefs(o), [ref1],
"list of refs does not match")
del ref1
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(weakref.getweakrefs(o), [],
"list of refs not cleared")
self.assertTrue(mr.called)
self.assertEqual(mr.value, 24)
del o
+ gc_collect() # For PyPy or other GCs.
self.assertIsNone(mr())
self.assertTrue(mr.called)
del items1, items2
self.assertEqual(len(dict), self.COUNT)
del objects[0]
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(len(dict), self.COUNT - 1,
"deleting object did not cause dictionary update")
del objects, o
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(len(dict), 0,
"deleting the values did not clear the dictionary")
# regression on SF bug #447152:
dict = weakref.WeakValueDictionary()
self.assertRaises(KeyError, dict.__getitem__, 1)
dict[2] = C()
+ gc_collect() # For PyPy or other GCs.
self.assertRaises(KeyError, dict.__getitem__, 2)
def test_weak_keys(self):
del items1, items2
self.assertEqual(len(dict), self.COUNT)
del objects[0]
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(len(dict), (self.COUNT - 1),
"deleting object did not cause dictionary update")
del objects, o
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(len(dict), 0,
"deleting the keys did not clear the dictionary")
o = Object(42)
for o in objs:
count += 1
del d[o]
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(len(d), 0)
self.assertEqual(count, 2)
libreftest = """ Doctest for examples in the library reference: weakref.rst
+>>> from test.support import gc_collect
>>> import weakref
>>> class Dict(dict):
... pass
>>> o is o2
True
>>> del o, o2
+>>> gc_collect() # For PyPy or other GCs.
>>> print(r())
None
>>> id2obj(a_id) is a
True
>>> del a
+>>> gc_collect() # For PyPy or other GCs.
>>> try:
... id2obj(a_id)
... except KeyError:
from collections.abc import Set, MutableSet
import gc
import contextlib
+from test import support
class Foo:
self.assertEqual(len(self.s), len(self.d))
self.assertEqual(len(self.fs), 1)
del self.obj
+ support.gc_collect() # For PyPy or other GCs.
self.assertEqual(len(self.fs), 0)
def test_contains(self):
self.assertNotIn(1, self.s)
self.assertIn(self.obj, self.fs)
del self.obj
+ support.gc_collect() # For PyPy or other GCs.
self.assertNotIn(ustr('F'), self.fs)
def test_union(self):
self.assertEqual(self.s, dup)
self.assertRaises(TypeError, self.s.add, [])
self.fs.add(Foo())
+ support.gc_collect() # For PyPy or other GCs.
self.assertTrue(len(self.fs) == 1)
self.fs.add(self.obj)
self.assertTrue(len(self.fs) == 1)
n1 = len(s)
del it
gc.collect()
+ gc.collect() # For PyPy or other GCs.
n2 = len(s)
# one item may be kept alive inside the iterator
self.assertIn(n1, (0, 1))
wref = weakref.ref(e, wref_cb)
self.assertEqual(wref().tag, 'e')
del e
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(flag, True)
self.assertEqual(wref(), None)
def make_pyc(co, mtime, size):
data = marshal.dumps(co)
- if type(mtime) is type(0.0):
- # Mac mtimes need a bit of special casing
- if mtime < 0x7fffffff:
- mtime = int(mtime)
- else:
- mtime = int(-0x100000000 + int(mtime))
pyc = (importlib.util.MAGIC_NUMBER +
- struct.pack("<iii", 0, int(mtime), size & 0xFFFFFFFF) + data)
+ struct.pack("<iLL", 0,
+ int(mtime) & 0xFFFF_FFFF, size & 0xFFFF_FFFF) + data)
return pyc
def module_path_to_dotted_name(path):
TESTMOD + pyc_ext: (NOW, badtime_pyc)}
self.doTest(".py", files, TESTMOD)
+ def test2038MTime(self):
+ # Make sure we can handle mtimes larger than what a 32-bit signed number
+ # can hold.
+ twenty_thirty_eight_pyc = make_pyc(test_co, 2**32 - 1, len(test_src))
+ files = {TESTMOD + ".py": (NOW, test_src),
+ TESTMOD + pyc_ext: (NOW, twenty_thirty_eight_pyc)}
+ self.doTest(".py", files, TESTMOD)
+
def testPackage(self):
packdir = TESTPACK + os.sep
files = {packdir + "__init__" + pyc_ext: (NOW, test_pyc),
self.assertEqual(image.height(), 16)
self.assertIn('::img::test', self.root.image_names())
del image
+ support.gc_collect() # For PyPy or other GCs.
self.assertNotIn('::img::test', self.root.image_names())
def test_create_from_data(self):
self.assertEqual(image.height(), 16)
self.assertIn('::img::test', self.root.image_names())
del image
+ support.gc_collect() # For PyPy or other GCs.
self.assertNotIn('::img::test', self.root.image_names())
def assertEqualStrList(self, actual, expected):
self.assertEqual(image['file'], testfile)
self.assertIn('::img::test', self.root.image_names())
del image
+ support.gc_collect() # For PyPy or other GCs.
self.assertNotIn('::img::test', self.root.image_names())
def check_create_from_data(self, ext):
self.assertEqual(image['file'], '')
self.assertIn('::img::test', self.root.image_names())
del image
+ support.gc_collect() # For PyPy or other GCs.
self.assertNotIn('::img::test', self.root.image_names())
def test_create_from_ppm_file(self):
import unittest
+from test import support
+
import gc
import tkinter
from tkinter import (Variable, StringVar, IntVar, DoubleVar, BooleanVar, Tcl,
v = Variable(self.root, "sample string", "varname")
self.assertTrue(self.info_exists("varname"))
del v
+ support.gc_collect() # For PyPy or other GCs.
self.assertFalse(self.info_exists("varname"))
def test_dont_unset_not_existing(self):
v1 = Variable(self.root, name="name")
v2 = Variable(self.root, name="name")
del v1
+ support.gc_collect() # For PyPy or other GCs.
self.assertFalse(self.info_exists("name"))
# shouldn't raise exception
del v2
+ support.gc_collect() # For PyPy or other GCs.
self.assertFalse(self.info_exists("name"))
def test_equality(self):
import unittest
import tkinter
from tkinter import ttk
-from test.support import requires, run_unittest
+from test.support import requires, run_unittest, gc_collect
from tkinter.test.support import AbstractTkTest, AbstractDefaultRootTest
requires('gui')
x = ttk.LabeledScale(self.root)
var = x._variable._name
x.destroy()
+ gc_collect() # For PyPy or other GCs.
self.assertRaises(tkinter.TclError, x.tk.globalgetvar, var)
# manually created variable
else:
self.assertEqual(float(x.tk.globalgetvar(name)), myvar.get())
del myvar
+ gc_collect() # For PyPy or other GCs.
self.assertRaises(tkinter.TclError, x.tk.globalgetvar, name)
# checking that the tracing callback is properly removed
def test_resize(self):
x = ttk.LabeledScale(self.root)
x.pack(expand=True, fill='both')
+ gc_collect() # For PyPy or other GCs.
x.update()
width, height = x.master.winfo_width(), x.master.winfo_height()
optmenu.destroy()
self.assertEqual(optmenu.tk.globalgetvar(name), var.get())
del var
+ gc_collect() # For PyPy or other GCs.
self.assertRaises(tkinter.TclError, optmenu.tk.globalgetvar, name)
# check that variable is updated correctly
optmenu.pack()
+ gc_collect() # For PyPy or other GCs.
optmenu['menu'].invoke(0)
self.assertEqual(optmenu._variable.get(), items[0])
import unittest
import tkinter
from tkinter import ttk, TclError
-from test.support import requires
+from test.support import requires, gc_collect
import sys
from tkinter.test.test_ttk.test_functions import MockTclObj
self.assertEqual(conv(self.scale.get()), var.get())
self.assertEqual(conv(self.scale.get()), max + 5)
del var
+ gc_collect() # For PyPy or other GCs.
# the same happens with the value option
self.scale['value'] = max + 10
pos += 1
# Add an implicit NEWLINE if the input doesn't end in one
- if last_line and last_line[-1] not in '\r\n':
+ if last_line and last_line[-1] not in '\r\n' and not last_line.strip().startswith("#"):
yield TokenInfo(NEWLINE, '', (lnum - 1, len(last_line)), (lnum - 1, len(last_line) + 1), '')
for indent in indents[1:]: # pop remaining indent levels
yield TokenInfo(DEDENT, '', (lnum, 0), (lnum, 0), '')
_seen.add(id(exc_value))
# Gracefully handle (the way Python 2.4 and earlier did) the case of
# being called with no type or value (None, None, None).
- if (exc_value and exc_value.__cause__ is not None
- and id(exc_value.__cause__) not in _seen):
- cause = TracebackException(
- type(exc_value.__cause__),
- exc_value.__cause__,
- exc_value.__cause__.__traceback__,
- limit=limit,
- lookup_lines=False,
- capture_locals=capture_locals,
- _seen=_seen)
- else:
+ self._truncated = False
+ try:
+ if (exc_value and exc_value.__cause__ is not None
+ and id(exc_value.__cause__) not in _seen):
+ cause = TracebackException(
+ type(exc_value.__cause__),
+ exc_value.__cause__,
+ exc_value.__cause__.__traceback__,
+ limit=limit,
+ lookup_lines=False,
+ capture_locals=capture_locals,
+ _seen=_seen)
+ else:
+ cause = None
+ if (exc_value and exc_value.__context__ is not None
+ and id(exc_value.__context__) not in _seen):
+ context = TracebackException(
+ type(exc_value.__context__),
+ exc_value.__context__,
+ exc_value.__context__.__traceback__,
+ limit=limit,
+ lookup_lines=False,
+ capture_locals=capture_locals,
+ _seen=_seen)
+ else:
+ context = None
+ except RecursionError:
+ # The recursive call to the constructors above
+ # may result in a stack overflow for long exception chains,
+ # so we must truncate.
+ self._truncated = True
cause = None
- if (exc_value and exc_value.__context__ is not None
- and id(exc_value.__context__) not in _seen):
- context = TracebackException(
- type(exc_value.__context__),
- exc_value.__context__,
- exc_value.__context__.__traceback__,
- limit=limit,
- lookup_lines=False,
- capture_locals=capture_locals,
- _seen=_seen)
- else:
context = None
self.__cause__ = cause
self.__context__ = context
not self.__suppress_context__):
yield from self.__context__.format(chain=chain)
yield _context_message
+ if self._truncated:
+ yield (
+ 'Chained exceptions have been truncated to avoid '
+ 'stack overflow in traceback formatting:\n')
if self.stack:
yield 'Traceback (most recent call last):\n'
yield from self.stack.format()
----- turtle.py
This module is an extended reimplementation of turtle.py from the
-Python standard distribution up to Python 2.5. (See: http://www.python.org)
+Python standard distribution up to Python 2.5. (See: https://www.python.org)
It tries to keep the merits of turtle.py and to be (nearly) 100%
compatible with it. This means in the first place to enable the
# legitimate imports of those modules.
-def _type_convert(arg):
+def _type_convert(arg, module=None):
"""For converting None to type(None), and strings to ForwardRef."""
if arg is None:
return type(None)
if isinstance(arg, str):
- return ForwardRef(arg)
+ return ForwardRef(arg, module=module)
return arg
-def _type_check(arg, msg, is_argument=True):
+def _type_check(arg, msg, is_argument=True, module=None):
"""Check that the argument is a type, and return it (internal helper).
As a special case, accept None and return type(None) instead. Also wrap strings
if is_argument:
invalid_generic_forms = invalid_generic_forms + (ClassVar, Final)
- arg = _type_convert(arg)
+ arg = _type_convert(arg, module=module)
if (isinstance(arg, _GenericAlias) and
arg.__origin__ in invalid_generic_forms):
raise TypeError(f"{arg} is not valid as type argument")
__slots__ = ('__forward_arg__', '__forward_code__',
'__forward_evaluated__', '__forward_value__',
- '__forward_is_argument__')
+ '__forward_is_argument__', '__forward_module__')
- def __init__(self, arg, is_argument=True):
+ def __init__(self, arg, is_argument=True, module=None):
if not isinstance(arg, str):
raise TypeError(f"Forward reference must be a string -- got {arg!r}")
try:
self.__forward_evaluated__ = False
self.__forward_value__ = None
self.__forward_is_argument__ = is_argument
+ self.__forward_module__ = module
def _evaluate(self, globalns, localns, recursive_guard):
if self.__forward_arg__ in recursive_guard:
globalns = localns
elif localns is None:
localns = globalns
+ if self.__forward_module__ is not None:
+ globalns = getattr(
+ sys.modules.get(self.__forward_module__, None), '__dict__', globalns
+ )
type_ =_type_check(
eval(self.__forward_code__, globalns, localns),
"Forward references must evaluate to types.",
def _no_init(self, *args, **kwargs):
- if type(self)._is_protocol:
- raise TypeError('Protocols cannot be instantiated')
+ raise TypeError('Protocols cannot be instantiated')
def _allow_reckless_class_cheks():
# We have nothing more to do for non-protocols...
if not cls._is_protocol:
+ if cls.__init__ == _no_init:
+ for base in cls.__mro__:
+ init = base.__dict__.get('__init__', _no_init)
+ if init != _no_init:
+ cls.__init__ = init
+ break
+ else:
+ # should not happen
+ cls.__init__ = object.__init__
return
# ... otherwise check consistency of bases, and prohibit instantiation.
own_annotation_keys = set(own_annotations.keys())
msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
own_annotations = {
- n: _type_check(tp, msg) for n, tp in own_annotations.items()
+ n: _type_check(tp, msg, module=tp_dict.__module__)
+ for n, tp in own_annotations.items()
}
required_keys = set()
optional_keys = set()
function(*args, **kwargs)
def run(self, result=None):
- orig_result = result
if result is None:
result = self.defaultTestResult()
startTestRun = getattr(result, 'startTestRun', None)
+ stopTestRun = getattr(result, 'stopTestRun', None)
if startTestRun is not None:
startTestRun()
+ else:
+ stopTestRun = None
result.startTest(self)
-
- testMethod = getattr(self, self._testMethodName)
- if (getattr(self.__class__, "__unittest_skip__", False) or
- getattr(testMethod, "__unittest_skip__", False)):
- # If the class or method was skipped.
- try:
+ try:
+ testMethod = getattr(self, self._testMethodName)
+ if (getattr(self.__class__, "__unittest_skip__", False) or
+ getattr(testMethod, "__unittest_skip__", False)):
+ # If the class or method was skipped.
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
self._addSkip(result, self, skip_why)
- finally:
- result.stopTest(self)
- return
- expecting_failure_method = getattr(testMethod,
- "__unittest_expecting_failure__", False)
- expecting_failure_class = getattr(self,
- "__unittest_expecting_failure__", False)
- expecting_failure = expecting_failure_class or expecting_failure_method
- outcome = _Outcome(result)
- try:
- self._outcome = outcome
+ return result
+
+ expecting_failure = (
+ getattr(self, "__unittest_expecting_failure__", False) or
+ getattr(testMethod, "__unittest_expecting_failure__", False)
+ )
+ outcome = _Outcome(result)
+ try:
+ self._outcome = outcome
- with outcome.testPartExecutor(self):
- self._callSetUp()
- if outcome.success:
- outcome.expecting_failure = expecting_failure
- with outcome.testPartExecutor(self, isTest=True):
- self._callTestMethod(testMethod)
- outcome.expecting_failure = False
with outcome.testPartExecutor(self):
- self._callTearDown()
-
- self.doCleanups()
- for test, reason in outcome.skipped:
- self._addSkip(result, test, reason)
- self._feedErrorsToResult(result, outcome.errors)
- if outcome.success:
- if expecting_failure:
- if outcome.expectedFailure:
- self._addExpectedFailure(result, outcome.expectedFailure)
+ self._callSetUp()
+ if outcome.success:
+ outcome.expecting_failure = expecting_failure
+ with outcome.testPartExecutor(self, isTest=True):
+ self._callTestMethod(testMethod)
+ outcome.expecting_failure = False
+ with outcome.testPartExecutor(self):
+ self._callTearDown()
+
+ self.doCleanups()
+ for test, reason in outcome.skipped:
+ self._addSkip(result, test, reason)
+ self._feedErrorsToResult(result, outcome.errors)
+ if outcome.success:
+ if expecting_failure:
+ if outcome.expectedFailure:
+ self._addExpectedFailure(result, outcome.expectedFailure)
+ else:
+ self._addUnexpectedSuccess(result)
else:
- self._addUnexpectedSuccess(result)
- else:
- result.addSuccess(self)
- return result
+ result.addSuccess(self)
+ return result
+ finally:
+ # explicitly break reference cycles:
+ # outcome.errors -> frame -> outcome -> outcome.errors
+ # outcome.expectedFailure -> frame -> outcome -> outcome.expectedFailure
+ outcome.errors.clear()
+ outcome.expectedFailure = None
+
+ # clear the outcome, no more needed
+ self._outcome = None
+
finally:
result.stopTest(self)
- if orig_result is None:
- stopTestRun = getattr(result, 'stopTestRun', None)
- if stopTestRun is not None:
- stopTestRun()
-
- # explicitly break reference cycles:
- # outcome.errors -> frame -> outcome -> outcome.errors
- # outcome.expectedFailure -> frame -> outcome -> outcome.expectedFailure
- outcome.errors.clear()
- outcome.expectedFailure = None
-
- # clear the outcome, no more needed
- self._outcome = None
+ if stopTestRun is not None:
+ stopTestRun()
def doCleanups(self):
"""Execute all cleanup functions. Normally called for you after
if getattr(currentClass, "__unittest_skip__", False):
return
+ failed = False
try:
currentClass._classSetupFailed = False
except TypeError:
pass
setUpClass = getattr(currentClass, 'setUpClass', None)
+ doClassCleanups = getattr(currentClass, 'doClassCleanups', None)
if setUpClass is not None:
_call_if_exists(result, '_setupStdout')
try:
- setUpClass()
- except Exception as e:
- if isinstance(result, _DebugResult):
- raise
- currentClass._classSetupFailed = True
- className = util.strclass(currentClass)
- self._createClassOrModuleLevelException(result, e,
- 'setUpClass',
- className)
+ try:
+ setUpClass()
+ except Exception as e:
+ if isinstance(result, _DebugResult):
+ raise
+ failed = True
+ try:
+ currentClass._classSetupFailed = True
+ except TypeError:
+ pass
+ className = util.strclass(currentClass)
+ self._createClassOrModuleLevelException(result, e,
+ 'setUpClass',
+ className)
+ if failed and doClassCleanups is not None:
+ doClassCleanups()
+ for exc_info in currentClass.tearDown_exceptions:
+ self._createClassOrModuleLevelException(
+ result, exc_info[1], 'setUpClass', className,
+ info=exc_info)
finally:
_call_if_exists(result, '_restoreStdout')
- if currentClass._classSetupFailed is True:
- currentClass.doClassCleanups()
- if len(currentClass.tearDown_exceptions) > 0:
- for exc in currentClass.tearDown_exceptions:
- self._createClassOrModuleLevelException(
- result, exc[1], 'setUpClass', className,
- info=exc)
def _get_previous_module(self, result):
previousModule = None
if setUpModule is not None:
_call_if_exists(result, '_setupStdout')
try:
- setUpModule()
- except Exception as e:
try:
- case.doModuleCleanups()
- except Exception as exc:
- self._createClassOrModuleLevelException(result, exc,
+ setUpModule()
+ except Exception as e:
+ if isinstance(result, _DebugResult):
+ raise
+ result._moduleSetUpFailed = True
+ self._createClassOrModuleLevelException(result, e,
'setUpModule',
currentModule)
- if isinstance(result, _DebugResult):
- raise
- result._moduleSetUpFailed = True
- self._createClassOrModuleLevelException(result, e,
- 'setUpModule',
- currentModule)
+ if result._moduleSetUpFailed:
+ try:
+ case.doModuleCleanups()
+ except Exception as e:
+ self._createClassOrModuleLevelException(result, e,
+ 'setUpModule',
+ currentModule)
finally:
_call_if_exists(result, '_restoreStdout')
except KeyError:
return
- tearDownModule = getattr(module, 'tearDownModule', None)
- if tearDownModule is not None:
- _call_if_exists(result, '_setupStdout')
+ _call_if_exists(result, '_setupStdout')
+ try:
+ tearDownModule = getattr(module, 'tearDownModule', None)
+ if tearDownModule is not None:
+ try:
+ tearDownModule()
+ except Exception as e:
+ if isinstance(result, _DebugResult):
+ raise
+ self._createClassOrModuleLevelException(result, e,
+ 'tearDownModule',
+ previousModule)
try:
- tearDownModule()
+ case.doModuleCleanups()
except Exception as e:
if isinstance(result, _DebugResult):
raise
self._createClassOrModuleLevelException(result, e,
'tearDownModule',
previousModule)
- finally:
- _call_if_exists(result, '_restoreStdout')
- try:
- case.doModuleCleanups()
- except Exception as e:
- self._createClassOrModuleLevelException(result, e,
- 'tearDownModule',
- previousModule)
+ finally:
+ _call_if_exists(result, '_restoreStdout')
def _tearDownPreviousClass(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
- if currentClass == previousClass:
+ if currentClass == previousClass or previousClass is None:
return
if getattr(previousClass, '_classSetupFailed', False):
return
return
tearDownClass = getattr(previousClass, 'tearDownClass', None)
- if tearDownClass is not None:
- _call_if_exists(result, '_setupStdout')
- try:
- tearDownClass()
- except Exception as e:
- if isinstance(result, _DebugResult):
- raise
- className = util.strclass(previousClass)
- self._createClassOrModuleLevelException(result, e,
- 'tearDownClass',
- className)
- finally:
- _call_if_exists(result, '_restoreStdout')
- previousClass.doClassCleanups()
- if len(previousClass.tearDown_exceptions) > 0:
- for exc in previousClass.tearDown_exceptions:
- className = util.strclass(previousClass)
- self._createClassOrModuleLevelException(result, exc[1],
- 'tearDownClass',
- className,
- info=exc)
+ doClassCleanups = getattr(previousClass, 'doClassCleanups', None)
+ if tearDownClass is None and doClassCleanups is None:
+ return
+
+ _call_if_exists(result, '_setupStdout')
+ try:
+ if tearDownClass is not None:
+ try:
+ tearDownClass()
+ except Exception as e:
+ if isinstance(result, _DebugResult):
+ raise
+ className = util.strclass(previousClass)
+ self._createClassOrModuleLevelException(result, e,
+ 'tearDownClass',
+ className)
+ if doClassCleanups is not None:
+ doClassCleanups()
+ for exc_info in previousClass.tearDown_exceptions:
+ if isinstance(result, _DebugResult):
+ raise exc_info[1]
+ className = util.strclass(previousClass)
+ self._createClassOrModuleLevelException(result, exc_info[1],
+ 'tearDownClass',
+ className,
+ info=exc_info)
+ finally:
+ _call_if_exists(result, '_restoreStdout')
class _ErrorHolder(object):
import warnings
import weakref
import unittest
+from test.support import gc_collect
from itertools import product
self.foo()
Foo("test_functional").run()
+ gc_collect() # For PyPy or other GCs.
self.assertIsNone(wr())
Foo("test_with").run()
+ gc_collect() # For PyPy or other GCs.
self.assertIsNone(wr())
def testAssertNotRegex(self):
TestEquality, TestHashing, LoggingResult, LegacyLoggingResult,
ResultWithNoStartTestRunStopTestRun
)
-from test.support import captured_stderr
+from test.support import captured_stderr, gc_collect
log_foo = logging.getLogger('foo')
for method_name in ('test1', 'test2'):
testcase = TestCase(method_name)
testcase.run()
+ gc_collect() # For PyPy or other GCs.
self.assertEqual(MyException.ninstance, 0)
import textwrap
from test import support
+from test.support import captured_stdout
import traceback
import unittest
+from unittest.util import strclass
class MockTraceback(object):
unittest.result.traceback = traceback
+def bad_cleanup1():
+ print('do cleanup1')
+ raise TypeError('bad cleanup1')
+
+
+def bad_cleanup2():
+ print('do cleanup2')
+ raise ValueError('bad cleanup2')
+
+
class Test_TestResult(unittest.TestCase):
# Note: there are not separate tests for TestResult.wasSuccessful(),
# TestResult.errors, TestResult.failures, TestResult.testsRun or
self.assertEqual(result._original_stderr.getvalue(), expectedErrMessage)
self.assertMultiLineEqual(message, expectedFullMessage)
+ def testBufferSetUp(self):
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
+ result.buffer = True
+
+ class Foo(unittest.TestCase):
+ def setUp(self):
+ print('set up')
+ 1/0
+ def test_foo(self):
+ pass
+ suite = unittest.TestSuite([Foo('test_foo')])
+ suite(result)
+ expected_out = '\nStdout:\nset up\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
+ self.assertEqual(len(result.errors), 1)
+ description = f'test_foo ({strclass(Foo)})'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(str(test_case), description)
+ self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+
+ def testBufferTearDown(self):
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
+ result.buffer = True
+
+ class Foo(unittest.TestCase):
+ def tearDown(self):
+ print('tear down')
+ 1/0
+ def test_foo(self):
+ pass
+ suite = unittest.TestSuite([Foo('test_foo')])
+ suite(result)
+ expected_out = '\nStdout:\ntear down\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
+ self.assertEqual(len(result.errors), 1)
+ description = f'test_foo ({strclass(Foo)})'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(str(test_case), description)
+ self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+
+ def testBufferDoCleanups(self):
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
+ result.buffer = True
+
+ class Foo(unittest.TestCase):
+ def setUp(self):
+ print('set up')
+ self.addCleanup(bad_cleanup1)
+ self.addCleanup(bad_cleanup2)
+ def test_foo(self):
+ pass
+ suite = unittest.TestSuite([Foo('test_foo')])
+ suite(result)
+ expected_out = '\nStdout:\nset up\ndo cleanup2\ndo cleanup1\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
+ self.assertEqual(len(result.errors), 2)
+ description = f'test_foo ({strclass(Foo)})'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(str(test_case), description)
+ self.assertIn('ValueError: bad cleanup2', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+ test_case, formatted_exc = result.errors[1]
+ self.assertEqual(str(test_case), description)
+ self.assertIn('TypeError: bad cleanup1', formatted_exc)
+ self.assertNotIn('ValueError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+
+ def testBufferSetUp_DoCleanups(self):
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
+ result.buffer = True
+
+ class Foo(unittest.TestCase):
+ def setUp(self):
+ print('set up')
+ self.addCleanup(bad_cleanup1)
+ self.addCleanup(bad_cleanup2)
+ 1/0
+ def test_foo(self):
+ pass
+ suite = unittest.TestSuite([Foo('test_foo')])
+ suite(result)
+ expected_out = '\nStdout:\nset up\ndo cleanup2\ndo cleanup1\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
+ self.assertEqual(len(result.errors), 3)
+ description = f'test_foo ({strclass(Foo)})'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(str(test_case), description)
+ self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
+ self.assertNotIn('ValueError', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+ test_case, formatted_exc = result.errors[1]
+ self.assertEqual(str(test_case), description)
+ self.assertIn('ValueError: bad cleanup2', formatted_exc)
+ self.assertNotIn('ZeroDivisionError', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+ test_case, formatted_exc = result.errors[2]
+ self.assertEqual(str(test_case), description)
+ self.assertIn('TypeError: bad cleanup1', formatted_exc)
+ self.assertNotIn('ZeroDivisionError', formatted_exc)
+ self.assertNotIn('ValueError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+
+ def testBufferTearDown_DoCleanups(self):
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
+ result.buffer = True
+
+ class Foo(unittest.TestCase):
+ def setUp(self):
+ print('set up')
+ self.addCleanup(bad_cleanup1)
+ self.addCleanup(bad_cleanup2)
+ def tearDown(self):
+ print('tear down')
+ 1/0
+ def test_foo(self):
+ pass
+ suite = unittest.TestSuite([Foo('test_foo')])
+ suite(result)
+ expected_out = '\nStdout:\nset up\ntear down\ndo cleanup2\ndo cleanup1\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
+ self.assertEqual(len(result.errors), 3)
+ description = f'test_foo ({strclass(Foo)})'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(str(test_case), description)
+ self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
+ self.assertNotIn('ValueError', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+ test_case, formatted_exc = result.errors[1]
+ self.assertEqual(str(test_case), description)
+ self.assertIn('ValueError: bad cleanup2', formatted_exc)
+ self.assertNotIn('ZeroDivisionError', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+ test_case, formatted_exc = result.errors[2]
+ self.assertEqual(str(test_case), description)
+ self.assertIn('TypeError: bad cleanup1', formatted_exc)
+ self.assertNotIn('ZeroDivisionError', formatted_exc)
+ self.assertNotIn('ValueError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+
def testBufferSetupClass(self):
- result = unittest.TestResult()
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
@classmethod
def setUpClass(cls):
+ print('set up class')
1/0
def test_foo(self):
pass
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
+ expected_out = '\nStdout:\nset up class\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 1)
+ description = f'setUpClass ({strclass(Foo)})'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
def testBufferTearDownClass(self):
- result = unittest.TestResult()
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
@classmethod
def tearDownClass(cls):
+ print('tear down class')
1/0
def test_foo(self):
pass
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
+ expected_out = '\nStdout:\ntear down class\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 1)
+ description = f'tearDownClass ({strclass(Foo)})'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+
+ def testBufferDoClassCleanups(self):
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
+ result.buffer = True
+
+ class Foo(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ print('set up class')
+ cls.addClassCleanup(bad_cleanup1)
+ cls.addClassCleanup(bad_cleanup2)
+ @classmethod
+ def tearDownClass(cls):
+ print('tear down class')
+ def test_foo(self):
+ pass
+ suite = unittest.TestSuite([Foo('test_foo')])
+ suite(result)
+ expected_out = '\nStdout:\ntear down class\ndo cleanup2\ndo cleanup1\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
+ self.assertEqual(len(result.errors), 2)
+ description = f'tearDownClass ({strclass(Foo)})'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ValueError: bad cleanup2', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+ test_case, formatted_exc = result.errors[1]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('TypeError: bad cleanup1', formatted_exc)
+ self.assertNotIn('ValueError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+
+ def testBufferSetupClass_DoClassCleanups(self):
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
+ result.buffer = True
+
+ class Foo(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ print('set up class')
+ cls.addClassCleanup(bad_cleanup1)
+ cls.addClassCleanup(bad_cleanup2)
+ 1/0
+ def test_foo(self):
+ pass
+ suite = unittest.TestSuite([Foo('test_foo')])
+ suite(result)
+ expected_out = '\nStdout:\nset up class\ndo cleanup2\ndo cleanup1\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
+ self.assertEqual(len(result.errors), 3)
+ description = f'setUpClass ({strclass(Foo)})'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
+ self.assertNotIn('ValueError', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn('\nStdout:\nset up class\n', formatted_exc)
+ test_case, formatted_exc = result.errors[1]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ValueError: bad cleanup2', formatted_exc)
+ self.assertNotIn('ZeroDivisionError', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+ test_case, formatted_exc = result.errors[2]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('TypeError: bad cleanup1', formatted_exc)
+ self.assertNotIn('ZeroDivisionError', formatted_exc)
+ self.assertNotIn('ValueError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+
+ def testBufferTearDownClass_DoClassCleanups(self):
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
+ result.buffer = True
+
+ class Foo(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ print('set up class')
+ cls.addClassCleanup(bad_cleanup1)
+ cls.addClassCleanup(bad_cleanup2)
+ @classmethod
+ def tearDownClass(cls):
+ print('tear down class')
+ 1/0
+ def test_foo(self):
+ pass
+ suite = unittest.TestSuite([Foo('test_foo')])
+ suite(result)
+ expected_out = '\nStdout:\ntear down class\ndo cleanup2\ndo cleanup1\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
+ self.assertEqual(len(result.errors), 3)
+ description = f'tearDownClass ({strclass(Foo)})'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
+ self.assertNotIn('ValueError', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn('\nStdout:\ntear down class\n', formatted_exc)
+ test_case, formatted_exc = result.errors[1]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ValueError: bad cleanup2', formatted_exc)
+ self.assertNotIn('ZeroDivisionError', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+ test_case, formatted_exc = result.errors[2]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('TypeError: bad cleanup1', formatted_exc)
+ self.assertNotIn('ZeroDivisionError', formatted_exc)
+ self.assertNotIn('ValueError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
def testBufferSetUpModule(self):
- result = unittest.TestResult()
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
class Module(object):
@staticmethod
def setUpModule():
+ print('set up module')
1/0
Foo.__module__ = 'Module'
self.addCleanup(sys.modules.pop, 'Module')
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
+ expected_out = '\nStdout:\nset up module\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 1)
+ description = 'setUpModule (Module)'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
def testBufferTearDownModule(self):
- result = unittest.TestResult()
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
result.buffer = True
class Foo(unittest.TestCase):
class Module(object):
@staticmethod
def tearDownModule():
+ print('tear down module')
1/0
Foo.__module__ = 'Module'
self.addCleanup(sys.modules.pop, 'Module')
suite = unittest.TestSuite([Foo('test_foo')])
suite(result)
+ expected_out = '\nStdout:\ntear down module\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
+ self.assertEqual(len(result.errors), 1)
+ description = 'tearDownModule (Module)'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+
+ def testBufferDoModuleCleanups(self):
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
+ result.buffer = True
+
+ class Foo(unittest.TestCase):
+ def test_foo(self):
+ pass
+ class Module(object):
+ @staticmethod
+ def setUpModule():
+ print('set up module')
+ unittest.addModuleCleanup(bad_cleanup1)
+ unittest.addModuleCleanup(bad_cleanup2)
+
+ Foo.__module__ = 'Module'
+ sys.modules['Module'] = Module
+ self.addCleanup(sys.modules.pop, 'Module')
+ suite = unittest.TestSuite([Foo('test_foo')])
+ suite(result)
+ expected_out = '\nStdout:\ndo cleanup2\ndo cleanup1\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(len(result.errors), 1)
+ description = 'tearDownModule (Module)'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ValueError: bad cleanup2', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+
+ def testBufferSetUpModule_DoModuleCleanups(self):
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
+ result.buffer = True
+
+ class Foo(unittest.TestCase):
+ def test_foo(self):
+ pass
+ class Module(object):
+ @staticmethod
+ def setUpModule():
+ print('set up module')
+ unittest.addModuleCleanup(bad_cleanup1)
+ unittest.addModuleCleanup(bad_cleanup2)
+ 1/0
+
+ Foo.__module__ = 'Module'
+ sys.modules['Module'] = Module
+ self.addCleanup(sys.modules.pop, 'Module')
+ suite = unittest.TestSuite([Foo('test_foo')])
+ suite(result)
+ expected_out = '\nStdout:\nset up module\ndo cleanup2\ndo cleanup1\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
+ self.assertEqual(len(result.errors), 2)
+ description = 'setUpModule (Module)'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
+ self.assertNotIn('ValueError', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn('\nStdout:\nset up module\n', formatted_exc)
+ test_case, formatted_exc = result.errors[1]
+ self.assertIn(expected_out, formatted_exc)
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ValueError: bad cleanup2', formatted_exc)
+ self.assertNotIn('ZeroDivisionError', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
+
+ def testBufferTearDownModule_DoModuleCleanups(self):
+ with captured_stdout() as stdout:
+ result = unittest.TestResult()
+ result.buffer = True
+
+ class Foo(unittest.TestCase):
+ def test_foo(self):
+ pass
+ class Module(object):
+ @staticmethod
+ def setUpModule():
+ print('set up module')
+ unittest.addModuleCleanup(bad_cleanup1)
+ unittest.addModuleCleanup(bad_cleanup2)
+ @staticmethod
+ def tearDownModule():
+ print('tear down module')
+ 1/0
+
+ Foo.__module__ = 'Module'
+ sys.modules['Module'] = Module
+ self.addCleanup(sys.modules.pop, 'Module')
+ suite = unittest.TestSuite([Foo('test_foo')])
+ suite(result)
+ expected_out = '\nStdout:\ntear down module\ndo cleanup2\ndo cleanup1\n'
+ self.assertEqual(stdout.getvalue(), expected_out)
+ self.assertEqual(len(result.errors), 2)
+ description = 'tearDownModule (Module)'
+ test_case, formatted_exc = result.errors[0]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ZeroDivisionError: division by zero', formatted_exc)
+ self.assertNotIn('ValueError', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn('\nStdout:\ntear down module\n', formatted_exc)
+ test_case, formatted_exc = result.errors[1]
+ self.assertEqual(test_case.description, description)
+ self.assertIn('ValueError: bad cleanup2', formatted_exc)
+ self.assertNotIn('ZeroDivisionError', formatted_exc)
+ self.assertNotIn('TypeError', formatted_exc)
+ self.assertIn(expected_out, formatted_exc)
if __name__ == '__main__':
self.assertEqual(ordering,
['setUpClass', 'test', 'tearDownClass', 'cleanup_good'])
- def test_debug_executes_classCleanUp(self):
+ def test_run_class_cleanUp_without_tearDownClass(self):
ordering = []
+ blowUp = True
class TestableTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
ordering.append('setUpClass')
cls.addClassCleanup(cleanup, ordering)
+ if blowUp:
+ raise Exception()
+ def testNothing(self):
+ ordering.append('test')
+ @classmethod
+ @property
+ def tearDownClass(cls):
+ raise AttributeError
+
+ runTests(TestableTest)
+ self.assertEqual(ordering, ['setUpClass', 'cleanup_good'])
+
+ ordering = []
+ blowUp = False
+ runTests(TestableTest)
+ self.assertEqual(ordering,
+ ['setUpClass', 'test', 'cleanup_good'])
+
+ def test_debug_executes_classCleanUp(self):
+ ordering = []
+ blowUp = False
+
+ class TestableTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ ordering.append('setUpClass')
+ cls.addClassCleanup(cleanup, ordering, blowUp=blowUp)
def testNothing(self):
ordering.append('test')
@classmethod
self.assertEqual(ordering,
['setUpClass', 'test', 'tearDownClass', 'cleanup_good'])
+ ordering = []
+ blowUp = True
+ suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestableTest)
+ with self.assertRaises(Exception) as cm:
+ suite.debug()
+ self.assertEqual(str(cm.exception), 'CleanUpExc')
+ self.assertEqual(ordering,
+ ['setUpClass', 'test', 'tearDownClass', 'cleanup_exc'])
+
+ def test_debug_executes_classCleanUp_when_teardown_exception(self):
+ ordering = []
+ blowUp = False
+
+ class TestableTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ ordering.append('setUpClass')
+ cls.addClassCleanup(cleanup, ordering, blowUp=blowUp)
+ def testNothing(self):
+ ordering.append('test')
+ @classmethod
+ def tearDownClass(cls):
+ raise Exception('TearDownClassExc')
+
+ suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestableTest)
+ with self.assertRaises(Exception) as cm:
+ suite.debug()
+ self.assertEqual(str(cm.exception), 'TearDownClassExc')
+ self.assertEqual(ordering, ['setUpClass', 'test'])
+ self.assertTrue(TestableTest._class_cleanups)
+ TestableTest._class_cleanups.clear()
+
+ ordering = []
+ blowUp = True
+ suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestableTest)
+ with self.assertRaises(Exception) as cm:
+ suite.debug()
+ self.assertEqual(str(cm.exception), 'TearDownClassExc')
+ self.assertEqual(ordering, ['setUpClass', 'test'])
+ self.assertTrue(TestableTest._class_cleanups)
+ TestableTest._class_cleanups.clear()
+
def test_doClassCleanups_with_errors_addClassCleanUp(self):
class TestableTest(unittest.TestCase):
def testNothing(self):
self.assertEqual(ordering,
['setUpClass', 'setUp', 'test',
'tearDownClass', 'cleanup_exc'])
+
ordering = []
class_blow_up = True
method_blow_up = False
['setUpClass', 'setUp', 'tearDownClass',
'cleanup_exc'])
+ def test_with_errors_in_tearDownClass(self):
+ ordering = []
+ class TestableTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ ordering.append('setUpClass')
+ cls.addClassCleanup(cleanup, ordering)
+ def testNothing(self):
+ ordering.append('test')
+ @classmethod
+ def tearDownClass(cls):
+ ordering.append('tearDownClass')
+ raise Exception('TearDownExc')
+
+ result = runTests(TestableTest)
+ self.assertEqual(result.errors[0][1].splitlines()[-1],
+ 'Exception: TearDownExc')
+ self.assertEqual(ordering,
+ ['setUpClass', 'test', 'tearDownClass', 'cleanup_good'])
+
class TestModuleCleanUp(unittest.TestCase):
def test_add_and_do_ModuleCleanup(self):
'tearDownModule2', 'cleanup_good'])
self.assertEqual(unittest.case._module_cleanups, [])
- def test_debug_module_executes_cleanUp(self):
+ def test_run_module_cleanUp_without_teardown(self):
ordering = []
class Module(object):
@staticmethod
def setUpModule():
ordering.append('setUpModule')
unittest.addModuleCleanup(cleanup, ordering)
+
+ class TestableTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ ordering.append('setUpClass')
+ def testNothing(self):
+ ordering.append('test')
+ @classmethod
+ def tearDownClass(cls):
+ ordering.append('tearDownClass')
+
+ TestableTest.__module__ = 'Module'
+ sys.modules['Module'] = Module
+ runTests(TestableTest)
+ self.assertEqual(ordering, ['setUpModule', 'setUpClass', 'test',
+ 'tearDownClass', 'cleanup_good'])
+ self.assertEqual(unittest.case._module_cleanups, [])
+
+ def test_run_module_cleanUp_when_teardown_exception(self):
+ ordering = []
+ class Module(object):
+ @staticmethod
+ def setUpModule():
+ ordering.append('setUpModule')
+ unittest.addModuleCleanup(cleanup, ordering)
+ @staticmethod
+ def tearDownModule():
+ raise Exception('CleanUpExc')
+
+ class TestableTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ ordering.append('setUpClass')
+ def testNothing(self):
+ ordering.append('test')
+ @classmethod
+ def tearDownClass(cls):
+ ordering.append('tearDownClass')
+
+ TestableTest.__module__ = 'Module'
+ sys.modules['Module'] = Module
+ result = runTests(TestableTest)
+ self.assertEqual(result.errors[0][1].splitlines()[-1],
+ 'Exception: CleanUpExc')
+ self.assertEqual(ordering, ['setUpModule', 'setUpClass', 'test',
+ 'tearDownClass', 'cleanup_good'])
+ self.assertEqual(unittest.case._module_cleanups, [])
+
+ def test_debug_module_executes_cleanUp(self):
+ ordering = []
+ blowUp = False
+ class Module(object):
+ @staticmethod
+ def setUpModule():
+ ordering.append('setUpModule')
+ unittest.addModuleCleanup(cleanup, ordering, blowUp=blowUp)
@staticmethod
def tearDownModule():
ordering.append('tearDownModule')
'tearDownModule', 'cleanup_good'])
self.assertEqual(unittest.case._module_cleanups, [])
+ ordering = []
+ blowUp = True
+ suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestableTest)
+ with self.assertRaises(Exception) as cm:
+ suite.debug()
+ self.assertEqual(str(cm.exception), 'CleanUpExc')
+ self.assertEqual(ordering, ['setUpModule', 'setUpClass', 'test',
+ 'tearDownClass', 'tearDownModule', 'cleanup_exc'])
+ self.assertEqual(unittest.case._module_cleanups, [])
+
+ def test_debug_module_cleanUp_when_teardown_exception(self):
+ ordering = []
+ blowUp = False
+ class Module(object):
+ @staticmethod
+ def setUpModule():
+ ordering.append('setUpModule')
+ unittest.addModuleCleanup(cleanup, ordering, blowUp=blowUp)
+ @staticmethod
+ def tearDownModule():
+ raise Exception('TearDownModuleExc')
+
+ class TestableTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ ordering.append('setUpClass')
+ def testNothing(self):
+ ordering.append('test')
+ @classmethod
+ def tearDownClass(cls):
+ ordering.append('tearDownClass')
+
+ TestableTest.__module__ = 'Module'
+ sys.modules['Module'] = Module
+ suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestableTest)
+ with self.assertRaises(Exception) as cm:
+ suite.debug()
+ self.assertEqual(str(cm.exception), 'TearDownModuleExc')
+ self.assertEqual(ordering, ['setUpModule', 'setUpClass', 'test',
+ 'tearDownClass'])
+ self.assertTrue(unittest.case._module_cleanups)
+ unittest.case._module_cleanups.clear()
+
+ ordering = []
+ blowUp = True
+ suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestableTest)
+ with self.assertRaises(Exception) as cm:
+ suite.debug()
+ self.assertEqual(str(cm.exception), 'TearDownModuleExc')
+ self.assertEqual(ordering, ['setUpModule', 'setUpClass', 'test',
+ 'tearDownClass'])
+ self.assertTrue(unittest.case._module_cleanups)
+ unittest.case._module_cleanups.clear()
+
def test_addClassCleanup_arg_errors(self):
cleanups = []
def cleanup(*args, **kwargs):
method_blow_up = False
result = runTests(TestableTest)
self.assertEqual(result.errors[0][1].splitlines()[-1],
- 'Exception: CleanUpExc')
- self.assertEqual(result.errors[1][1].splitlines()[-1],
'Exception: ModuleExc')
+ self.assertEqual(result.errors[1][1].splitlines()[-1],
+ 'Exception: CleanUpExc')
self.assertEqual(ordering, ['setUpModule', 'cleanup_exc'])
ordering = []
def test_skipping(self):
class Foo(unittest.TestCase):
+ def defaultTestResult(self):
+ return LoggingResult(events)
def test_skip_me(self):
self.skipTest("skip")
events = []
result = LoggingResult(events)
test = Foo("test_skip_me")
- test.run(result)
+ self.assertIs(test.run(result), result)
self.assertEqual(events, ['startTest', 'addSkip', 'stopTest'])
self.assertEqual(result.skipped, [(test, "skip")])
+ events = []
+ result = test.run()
+ self.assertEqual(events, ['startTestRun', 'startTest', 'addSkip',
+ 'stopTest', 'stopTestRun'])
+ self.assertEqual(result.skipped, [(test, "skip")])
+ self.assertEqual(result.testsRun, 1)
+
# Try letting setUp skip the test now.
class Foo(unittest.TestCase):
+ def defaultTestResult(self):
+ return LoggingResult(events)
def setUp(self):
self.skipTest("testing")
def test_nothing(self): pass
events = []
result = LoggingResult(events)
test = Foo("test_nothing")
- test.run(result)
+ self.assertIs(test.run(result), result)
self.assertEqual(events, ['startTest', 'addSkip', 'stopTest'])
self.assertEqual(result.skipped, [(test, "testing")])
self.assertEqual(result.testsRun, 1)
+ events = []
+ result = test.run()
+ self.assertEqual(events, ['startTestRun', 'startTest', 'addSkip',
+ 'stopTest', 'stopTestRun'])
+ self.assertEqual(result.skipped, [(test, "testing")])
+ self.assertEqual(result.testsRun, 1)
+
def test_skipping_subtests(self):
class Foo(unittest.TestCase):
+ def defaultTestResult(self):
+ return LoggingResult(events)
def test_skip_me(self):
with self.subTest(a=1):
with self.subTest(b=2):
events = []
result = LoggingResult(events)
test = Foo("test_skip_me")
- test.run(result)
+ self.assertIs(test.run(result), result)
self.assertEqual(events, ['startTest', 'addSkip', 'addSkip',
'addSkip', 'stopTest'])
self.assertEqual(len(result.skipped), 3)
self.assertIsNot(subtest, test)
self.assertEqual(result.skipped[2], (test, "skip 3"))
+ events = []
+ result = test.run()
+ self.assertEqual(events,
+ ['startTestRun', 'startTest', 'addSkip', 'addSkip',
+ 'addSkip', 'stopTest', 'stopTestRun'])
+ self.assertEqual([msg for subtest, msg in result.skipped],
+ ['skip 1', 'skip 2', 'skip 3'])
+
def test_skipping_decorators(self):
op_table = ((unittest.skipUnless, False, True),
(unittest.skipIf, True, False))
for deco, do_skip, dont_skip in op_table:
class Foo(unittest.TestCase):
+ def defaultTestResult(self):
+ return LoggingResult(events)
+
@deco(do_skip, "testing")
def test_skip(self): pass
def test_dont_skip(self): pass
test_do_skip = Foo("test_skip")
test_dont_skip = Foo("test_dont_skip")
+
suite = unittest.TestSuite([test_do_skip, test_dont_skip])
events = []
result = LoggingResult(events)
- suite.run(result)
+ self.assertIs(suite.run(result), result)
self.assertEqual(len(result.skipped), 1)
expected = ['startTest', 'addSkip', 'stopTest',
'startTest', 'addSuccess', 'stopTest']
self.assertEqual(result.skipped, [(test_do_skip, "testing")])
self.assertTrue(result.wasSuccessful())
+ events = []
+ result = test_do_skip.run()
+ self.assertEqual(events, ['startTestRun', 'startTest', 'addSkip',
+ 'stopTest', 'stopTestRun'])
+ self.assertEqual(result.skipped, [(test_do_skip, "testing")])
+
+ events = []
+ result = test_dont_skip.run()
+ self.assertEqual(events, ['startTestRun', 'startTest', 'addSuccess',
+ 'stopTest', 'stopTestRun'])
+ self.assertEqual(result.skipped, [])
+
def test_skip_class(self):
@unittest.skip("testing")
class Foo(unittest.TestCase):
+ def defaultTestResult(self):
+ return LoggingResult(events)
def test_1(self):
record.append(1)
+ events = []
record = []
- result = unittest.TestResult()
+ result = LoggingResult(events)
test = Foo("test_1")
suite = unittest.TestSuite([test])
- suite.run(result)
+ self.assertIs(suite.run(result), result)
+ self.assertEqual(events, ['startTest', 'addSkip', 'stopTest'])
+ self.assertEqual(result.skipped, [(test, "testing")])
+ self.assertEqual(record, [])
+
+ events = []
+ result = test.run()
+ self.assertEqual(events, ['startTestRun', 'startTest', 'addSkip',
+ 'stopTest', 'stopTestRun'])
self.assertEqual(result.skipped, [(test, "testing")])
self.assertEqual(record, [])
result = unittest.TestResult()
test = Foo("test_1")
suite = unittest.TestSuite([test])
- suite.run(result)
+ self.assertIs(suite.run(result), result)
self.assertEqual(result.skipped, [(test, "testing")])
self.assertEqual(record, [])
events = []
result = LoggingResult(events)
test = Foo("test_die")
- test.run(result)
+ self.assertIs(test.run(result), result)
self.assertEqual(events,
['startTest', 'addExpectedFailure', 'stopTest'])
self.assertEqual(result.expectedFailures[0][0], test)
events = []
result = LoggingResult(events)
test = Foo("test_1")
- test.run(result)
+ self.assertIs(test.run(result), result)
self.assertEqual(events,
['startTest', 'addExpectedFailure', 'stopTest'])
self.assertEqual(result.expectedFailures[0][0], test)
events = []
result = LoggingResult(events)
test = Bar("test_1")
- test.run(result)
+ self.assertIs(test.run(result), result)
self.assertEqual(events,
['startTest', 'addExpectedFailure', 'stopTest'])
self.assertEqual(result.expectedFailures[0][0], test)
events = []
result = LoggingResult(events)
test = Foo("test_die")
- test.run(result)
+ self.assertIs(test.run(result), result)
self.assertEqual(events,
['startTest', 'addSubTestSuccess',
'addExpectedFailure', 'stopTest'])
events = []
result = LoggingResult(events)
test = Foo("test_die")
- test.run(result)
+ self.assertIs(test.run(result), result)
self.assertEqual(events,
['startTest', 'addUnexpectedSuccess', 'stopTest'])
self.assertFalse(result.failures)
events = []
result = LoggingResult(events)
test = Foo("test_die")
- test.run(result)
+ self.assertIs(test.run(result), result)
self.assertEqual(events,
['startTest',
'addSubTestSuccess', 'addSubTestSuccess',
result = unittest.TestResult()
test = Foo("test_1")
suite = unittest.TestSuite([test])
- suite.run(result)
+ self.assertIs(suite.run(result), result)
self.assertEqual(result.skipped, [(test, "testing")])
self.assertFalse(Foo.wasSetUp)
self.assertFalse(Foo.wasTornDown)
result = unittest.TestResult()
test = Foo("test_1")
suite = unittest.TestSuite([test])
- suite.run(result)
+ self.assertIs(suite.run(result), result)
self.assertEqual(result.skipped, [(test, "testing")])
def test_skip_without_reason(self):
result = unittest.TestResult()
test = Foo("test_1")
suite = unittest.TestSuite([test])
- suite.run(result)
+ self.assertIs(suite.run(result), result)
self.assertEqual(result.skipped, [(test, "")])
if __name__ == "__main__":
# install it
urllib.request.install_opener(opener)
-f = urllib.request.urlopen('http://www.python.org/')
+f = urllib.request.urlopen('https://www.python.org/')
"""
# XXX issues:
This module is an implementation of PEP 205:
-http://www.python.org/dev/peps/pep-0205/
+https://www.python.org/dev/peps/pep-0205/
"""
# Naming convention: Variables named "wr" are weak reference objects;
self.data = {}
self.update(other, **kw)
- def _commit_removals(self):
- l = self._pending_removals
+ def _commit_removals(self, _atomic_removal=_remove_dead_weakref):
+ pop = self._pending_removals.pop
d = self.data
# We shouldn't encounter any KeyError, because this method should
# always be called *before* mutating the dict.
- while l:
- key = l.pop()
- _remove_dead_weakref(d, key)
+ while True:
+ try:
+ key = pop()
+ except IndexError:
+ return
+ _atomic_removal(d, key)
def __getitem__(self, key):
if self._pending_removals:
if self._iterating:
self._pending_removals.append(k)
else:
- del self.data[k]
+ try:
+ del self.data[k]
+ except KeyError:
+ pass
self._remove = remove
# A list of dead weakrefs (keys to be removed)
self._pending_removals = []
# because a dead weakref never compares equal to a live weakref,
# even if they happened to refer to equal objects.
# However, it means keys may already have been removed.
- l = self._pending_removals
+ pop = self._pending_removals.pop
d = self.data
- while l:
+ while True:
+ try:
+ key = pop()
+ except IndexError:
+ return
+
try:
- del d[l.pop()]
+ del d[key]
except KeyError:
pass
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
-# See http://www.python.org/psf/license for licensing details.
+# See https://www.python.org/psf/license for licensing details.
##
# Limited XInclude support for the ElementTree package.
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
-# See http://www.python.org/psf/license for licensing details.
+# See https://www.python.org/psf/license for licensing details.
##
# Implementation module for XPath support. There's usually no reason
#---------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
-# See http://www.python.org/psf/license for licensing details.
+# See https://www.python.org/psf/license for licensing details.
#
# ElementTree
# Copyright (c) 1999-2008 by Fredrik Lundh. All rights reserved.
def __init__(self, events=None, *, _parser=None):
# The _parser argument is for internal use only and must not be relied
# upon in user code. It will be removed in a future release.
- # See http://bugs.python.org/issue17741 for more details.
+ # See https://bugs.python.org/issue17741 for more details.
self._events_queue = collections.deque()
self._parser = _parser or XMLParser(target=TreeBuilder())
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
-# See http://www.python.org/psf/license for licensing details.
+# See https://www.python.org/psf/license for licensing details.
url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif pep:
- url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
+ url = 'https://www.python.org/dev/peps/pep-%04d/' % int(pep)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif text[end:end+1] == '(':
results.append(self.namelink(name, methods, funcs, classes))
--- /dev/null
+From cef404f1e7a598166cbc2fd2e0048f7e2d752ad5 Mon Sep 17 00:00:00 2001
+From: David Carlier <devnexen@gmail.com>
+Date: Tue, 24 Aug 2021 22:40:14 +0100
+Subject: [PATCH] Darwin platform allows to build on releases before
+ Yosemite/ios 8.
+
+issue #16407 #16408
+---
+ crypto/rand/rand_unix.c | 5 +----
+ include/crypto/rand.h | 10 ++++++++++
+ 2 files changed, 11 insertions(+), 4 deletions(-)
+
+diff --git a/crypto/rand/rand_unix.c b/crypto/rand/rand_unix.c
+index 43f1069d15..0f4525106a 100644
+--- a/crypto/rand/rand_unix.c
++++ b/crypto/rand/rand_unix.c
+@@ -34,9 +34,6 @@
+ #if defined(__OpenBSD__)
+ # include <sys/param.h>
+ #endif
+-#if defined(__APPLE__)
+-# include <CommonCrypto/CommonRandom.h>
+-#endif
+
+ #if defined(OPENSSL_SYS_UNIX) || defined(__DJGPP__)
+ # include <sys/types.h>
+@@ -381,7 +378,7 @@ static ssize_t syscall_random(void *buf, size_t buflen)
+ if (errno != ENOSYS)
+ return -1;
+ }
+-# elif defined(__APPLE__)
++# elif defined(OPENSSL_APPLE_CRYPTO_RANDOM)
+ if (CCRandomGenerateBytes(buf, buflen) == kCCSuccess)
+ return (ssize_t)buflen;
+
+diff --git a/include/crypto/rand.h b/include/crypto/rand.h
+index 5350d3a931..674f840fd1 100644
+--- a/include/crypto/rand.h
++++ b/include/crypto/rand.h
+@@ -20,6 +20,16 @@
+
+ # include <openssl/rand.h>
+
++# if defined(__APPLE__) && !defined(OPENSSL_NO_APPLE_CRYPTO_RANDOM)
++# include <Availability.h>
++# if (defined(__MAC_OS_X_VERSION_MIN_REQUIRED) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101000) || \
++ (defined(__IPHONE_OS_VERSION_MIN_REQUIRED) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 80000)
++# define OPENSSL_APPLE_CRYPTO_RANDOM 1
++# include <CommonCrypto/CommonCryptoError.h>
++# include <CommonCrypto/CommonRandom.h>
++# endif
++# endif
++
+ /* forward declaration */
+ typedef struct rand_pool_st RAND_POOL;
+
+--
+2.33.0
+
that installer packages are submitted to and pass Apple's automated
notarization service using the altool command. To pass notarization,
the binaries included in the package must be built with at least
-the macOS 10.9 SDK, mout now be signed with the codesign utility
+the macOS 10.9 SDK, must now be signed with the codesign utility,
and executables must opt in to the hardened run time option with
any necessary entitlements. Details of these processes are
available in the on-line Apple Developer Documentation and man pages.
-As of 3.8.0 and 3.7.7, PSF practice is to build one installer variants
-for each release. Note that as of this writing, no Pythons support
-building on a newer version of macOS that will run on older versions
+A goal of PSF-provided (python.org) Python binaries for macOS is to
+support a wide-range of operating system releases with one set of
+binaries. Currently, the oldest release supported by python.org
+binaries is macOS 10.9; it is still possible to build Python and
+Python installers on older versions of macOS but we do not regularly
+test on those systems nor provide binaries for them.
+
+Prior to Python 3.9.1, no Python releases supported building on a
+newer version of macOS that will run on older versions
by setting MACOSX_DEPLOYMENT_TARGET. This is because the various
-Python C modules do not yet support runtime testing of macOS
+Python C modules did not yet support runtime testing of macOS
feature availability (for example, by using macOS AvailabilityMacros.h
-and weak-linking). To build a Python that is to be used on a
-range of macOS releases, always build on the oldest release to be
-supported; the necessary shared libraries for that release will
-normally also be available on later systems, with the occasional
-exception such as the removal of 32-bit libraries in macOS 10.15.
-
-build-installer requires Apple Developer tools, either from the
+and weak-linking). To build a Python that is to be used on a
+range of macOS releases, it was necessary to always build on the
+oldest release to be supported; the necessary shared libraries for
+that release will normally also be available on later systems,
+with the occasional exception such as the removal of 32-bit
+libraries in macOS 10.15. For 3.9.x and recent earlier systems,
+PSF practice was to provide a "macOS 64-bit Intel installer" variant
+that was built on 10.9 that would run on macOS 10.9 and later.
+
+Starting with 3.9.1, Python fully supports macOS "weaklinking",
+meaning it is now possible to build a Python on a current macOS version
+with a deployment target of an earlier macOS system. For 3.9.1 and
+later systems, we provide a "macOS 64-bit universal2 installer"
+variant, currently built on macOS 11 Big Sur with fat binaries
+natively supporting both Apple Silicon (arm64) and Intel-64
+(x86_64) Macs running macOS 10.9 or later.
+
+The legacy "macOS 64-bit Intel installer" variant is expected to
+be retired prior to the end of 3.9.x support.
+
+build-installer.py requires Apple Developer tools, either from the
Command Line Tools package or from a full Xcode installation.
You should use the most recent version of either for the operating
system version in use. (One notable exception: on macOS 10.6,
Snow Leopard, use Xcode 3, not Xcode 4 which was released later
-in the 10.6 support cycle.)
+in the 10.6 support cycle.) build-installer.py also must be run
+with recent versions of Python 3.x or 2.7. On older systems,
+due to changes in TLS practices, it may be easier to manually
+download and cache third-party source distributions used by
+build-installer.py rather than have it attempt to automatically
+download them.
+
+1. universal2, arm64 and x86_64, for OS X 10.9 (and later)::
+
+ /path/to/bootstrap/python3 build-installer.py \
+ --universal-archs=universal2 \
+ --dep-target=10.9
+
+ - builds the following third-party libraries
+
+ * OpenSSL 1.1.1
+ * Tcl/Tk 8.6
+ * NCurses
+ * SQLite
+ * XZ
+ * libffi
+
+ - uses system-supplied versions of third-party libraries
+
+ * readline module links with Apple BSD editline (libedit)
+ * zlib
+ * bz2
+
+ - recommended build environment:
+
+ * Mac OS X 11 or later
+ * Xcode Command Line Tools 12.5 or later
+ * current default macOS SDK
+ * ``MACOSX_DEPLOYMENT_TARGET=10.9``
+ * Apple ``clang``
-1. 64-bit, x86_64, for OS X 10.9 (and later)::
+2. legacy Intel 64-bit, x86_64, for OS X 10.9 (and later)::
- /path/to/bootstrap/python2.7 build-installer.py \
+ /path/to/bootstrap/python3 build-installer.py \
--universal-archs=intel-64 \
--dep-target=10.9
"""
This script is used to build "official" universal installers on macOS.
+NEW for 3.10 and backports:
+- support universal2 variant with arm64 and x86_64 archs
+- enable clang optimizations when building on 10.15+
+
NEW for 3.9.0 and backports:
- 2.7 end-of-life issues:
- Python 3 installs now update the Current version link
def library_recipes():
result = []
- LT_10_5 = bool(getDeptargetTuple() < (10, 5))
-
# Since Apple removed the header files for the deprecated system
# OpenSSL as of the Xcode 7 release (for OS X 10.10+), we do not
# have much choice but to build our own copy here, too.
result.extend([
dict(
- name="OpenSSL 1.1.1k",
- url="https://www.openssl.org/source/openssl-1.1.1k.tar.gz",
- checksum='c4e7d95f782b08116afa27b30393dd27',
+ name="OpenSSL 1.1.1l",
+ url="https://www.openssl.org/source/openssl-1.1.1l.tar.gz",
+ checksum='ac0d4387f3ba0ad741b0580dd45f6ff3',
+ patches=['0001-Darwin-platform-allows-to-build-on-releases-before-Y.patch'],
buildrecipe=build_universal_openssl,
configure=None,
install=None,
'-DSQLITE_ENABLE_JSON1 '
'-DSQLITE_ENABLE_RTREE '
'-DSQLITE_TCL=0 '
- '%s' % ('','-DSQLITE_WITHOUT_ZONEMALLOC ')[LT_10_5]),
+ ),
configure_pre=[
'--enable-threadsafe',
'--enable-shared=no',
),
])
- if getDeptargetTuple() < (10, 5):
- result.extend([
- dict(
- name="Bzip2 1.0.6",
- url="http://bzip.org/1.0.6/bzip2-1.0.6.tar.gz",
- checksum='00b516f4704d4a7cb50a1d97e6e8e15b',
- configure=None,
- install='make install CC=%s CXX=%s, PREFIX=%s/usr/local/ CFLAGS="-arch %s"'%(
- CC, CXX,
- shellQuote(os.path.join(WORKDIR, 'libraries')),
- ' -arch '.join(ARCHLIST),
- ),
- ),
- dict(
- name="ZLib 1.2.3",
- url="http://www.gzip.org/zlib/zlib-1.2.3.tar.gz",
- checksum='debc62758716a169df9f62e6ab2bc634',
- configure=None,
- install='make install CC=%s CXX=%s, prefix=%s/usr/local/ CFLAGS="-arch %s"'%(
- CC, CXX,
- shellQuote(os.path.join(WORKDIR, 'libraries')),
- ' -arch '.join(ARCHLIST),
- ),
- ),
- dict(
- # Note that GNU readline is GPL'd software
- name="GNU Readline 6.1.2",
- url="http://ftp.gnu.org/pub/gnu/readline/readline-6.1.tar.gz" ,
- checksum='fc2f7e714fe792db1ce6ddc4c9fb4ef3',
- patchlevel='0',
- patches=[
- # The readline maintainers don't do actual micro releases, but
- # just ship a set of patches.
- ('http://ftp.gnu.org/pub/gnu/readline/readline-6.1-patches/readline61-001',
- 'c642f2e84d820884b0bf9fd176bc6c3f'),
- ('http://ftp.gnu.org/pub/gnu/readline/readline-6.1-patches/readline61-002',
- '1a76781a1ea734e831588285db7ec9b1'),
- ]
- ),
- ])
-
if not PYTHON_3:
result.extend([
dict(
\
In 1995, Guido continued his work on Python at the Corporation for National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) in Reston, Virginia where he released several versions of the software.\
\
-In May 2000, Guido and the Python core development team moved to BeOpen.com to form the BeOpen PythonLabs team. In October of the same year, the PythonLabs team moved to Digital Creations (now Zope Corporation, see http://www.zope.org). In 2001, the Python Software Foundation (PSF, see http://www.python.org/psf/) was formed, a non-profit organization created specifically to own Python-related Intellectual Property. Zope Corporation is a sponsoring member of the PSF.\
+In May 2000, Guido and the Python core development team moved to BeOpen.com to form the BeOpen PythonLabs team. In October of the same year, the PythonLabs team moved to Digital Creations (now Zope Corporation, see http://www.zope.org). In 2001, the Python Software Foundation (PSF, see https://www.python.org/psf/) was formed, a non-profit organization created specifically to own Python-related Intellectual Property. Zope Corporation is a sponsoring member of the PSF.\
\
All Python releases are Open Source (see http://www.opensource.org for the Open Source Definition). Historically, most, but not all, Python releases have also been GPL-compatible; the table below summarizes the various releases.\
\
aware, though, that the Cocoa-based AquaTk's supplied starting with macOS
10.6 have proven to be unstable. If possible, you should consider
installing a newer version before building on macOS 10.6 or later, such as
-the ActiveTcl 8.6. See http://www.python.org/download/mac/tcltk/. If you
+the ActiveTcl 8.6. See https://www.python.org/download/mac/tcltk/. If you
are building with an SDK, ensure that the newer Tcl and Tk frameworks are
seen in the SDK's ``Library/Frameworks`` directory; you may need to
manually create symlinks to their installed location, ``/Library/Frameworks``.
How do I create a binary distribution?
======================================
-Download and unpack the source release from http://www.python.org/download/.
+Download and unpack the source release from https://www.python.org/download/.
Go to the directory ``Mac/BuildScript``. There you will find a script
``build-installer.py`` that does all the work. This will download and build
a number of 3rd-party libaries, configures and builds a framework Python,
configure: WARNING: libintl.h: section "Present But Cannot Be Compiled"
configure: WARNING: libintl.h: proceeding with the preprocessor's result
configure: WARNING: libintl.h: in the future, the compiler will take precedence
- configure: WARNING: ## -------------------------------------- ##
- configure: WARNING: ## Report this to http://bugs.python.org/ ##
- configure: WARNING: ## -------------------------------------- ##
+ configure: WARNING: ## --------------------------------------- ##
+ configure: WARNING: ## Report this to https://bugs.python.org/ ##
+ configure: WARNING: ## --------------------------------------- ##
This almost always means you are trying to build a universal binary for
Python and have libraries in ``/usr/local`` that don't contain the required
Resources
=========
- * http://www.python.org/download/mac/
+ * https://www.python.org/downloads/macos/
- * http://www.python.org/community/sigs/current/pythonmac-sig/
+ * https://www.python.org/community/sigs/current/pythonmac-sig/
* https://devguide.python.org/
@$(UPDATE_FILE) $(srcdir)/Doc/data/python$(LDVERSION).abi $(srcdir)/Doc/data/python$(LDVERSION).abi.new
check-abidump: all
- abidiff "libpython$(LDVERSION).so" $(srcdir)/Doc/data/python$(LDVERSION).abi --drop-private-types --no-architecture --no-added-syms
+ abidiff $(srcdir)/Doc/data/python$(LDVERSION).abi "libpython$(LDVERSION).so" --drop-private-types --no-architecture --no-added-syms
############################################################################
# Regenerate all generated files
Florian Höch
Oleg Höfling
Robert Hölzl
+Stefan Hölzl
Catalin Iacob
Mihai Ibanescu
Ali Ikinci
Michael Otteneder
Richard Oudkerk
Russel Owen
+Noah Oxer
Joonas Paalasmaa
Martin Packman
Shriphani Palakodety
Python News
+++++++++++
+What's New in Python 3.9.7 final?
+=================================
+
+*Release date: 2021-08-30*
+
+Security
+--------
+
+- bpo-42278: Replaced usage of :func:`tempfile.mktemp` with
+ :class:`~tempfile.TemporaryDirectory` to avoid a potential race condition.
+
+- bpo-41180: Add auditing events to the :mod:`marshal` module, and stop
+ raising ``code.__init__`` events for every unmarshalled code object.
+ Directly instantiated code objects will continue to raise an event, and
+ audit event handlers should inspect or collect the raw marshal data. This
+ reduces a significant performance overhead when loading from ``.pyc``
+ files.
+
+- bpo-44394: Update the vendored copy of libexpat to 2.4.1 (from 2.2.8) to
+ get the fix for the CVE-2013-0340 "Billion Laughs" vulnerability. This
+ copy is most used on Windows and macOS.
+
+- bpo-43124: Made the internal ``putcmd`` function in :mod:`smtplib`
+ sanitize input for presence of ``\r`` and ``\n`` characters to avoid
+ (unlikely) command injection.
+
+Core and Builtins
+-----------------
+
+- bpo-45018: Fixed pickling of range iterators that iterated for over 2**32
+ times.
+
+- bpo-44962: Fix a race in WeakKeyDictionary, WeakValueDictionary and
+ WeakSet when two threads attempt to commit the last pending removal. This
+ fixes asyncio.create_task and fixes a data loss in asyncio.run where
+  shutdown_asyncgens is not run.
+
+- bpo-44954: Fixed a corner case bug where the result of
+ ``float.fromhex('0x.8p-1074')`` was rounded the wrong way.
+
+- bpo-44947: Refine the syntax error for trailing commas in import
+ statements. Patch by Pablo Galindo.
+
+- bpo-44698: Restore behaviour of complex exponentiation with integer-valued
+ exponent of type :class:`float` or :class:`complex`.
+
+- bpo-44885: Correct the ast locations of f-strings with format specs and
+ repeated expressions. Patch by Pablo Galindo
+
+- bpo-44872: Use new trashcan macros (Py_TRASHCAN_BEGIN/END) in
+ frameobject.c instead of the old ones (Py_TRASHCAN_SAFE_BEGIN/END).
+
+- bpo-33930: Fix segmentation fault with deep recursion when cleaning method
+ objects. Patch by Augusto Goulart and Pablo Galindo.
+
+- bpo-25782: Fix bug where ``PyErr_SetObject`` hangs when the current
+ exception has a cycle in its context chain.
+
+- bpo-44856: Fix reference leaks in the error paths of ``update_bases()``
+ and ``__build_class__``. Patch by Pablo Galindo.
+
+- bpo-44698: Fix undefined behaviour in complex object exponentiation.
+
+- bpo-44562: Remove uses of :c:func:`PyObject_GC_Del` in error path when
+ initializing :class:`types.GenericAlias`.
+
+- bpo-44523: Remove the pass-through for :func:`hash` of
+ :class:`weakref.proxy` objects to prevent unintended consequences when the
+ original referred object dies while the proxy is part of a hashable
+ object. Patch by Pablo Galindo.
+
+- bpo-44472: Fix ltrace functionality when exceptions are raised. Patch by
+ Pablo Galindo
+
+- bpo-44184: Fix a crash at Python exit when a deallocator function removes
+ the last strong reference to a heap type. Patch by Victor Stinner.
+
+- bpo-39091: Fix crash when passing a non-exception to a generator's
+ ``throw()`` method. Patch by Noah Oxer
+
+Library
+-------
+
+- bpo-41620: :meth:`~unittest.TestCase.run` now always returns a
+ :class:`~unittest.TestResult` instance. Previously it returned ``None`` if
+ the test class or method was decorated with a skipping decorator.
+
+- bpo-43913: Fix bugs in cleaning up classes and modules in :mod:`unittest`:
+
+ * Functions registered with :func:`~unittest.addModuleCleanup` were not called unless the user defines ``tearDownModule()`` in their test module.
+ * Functions registered with :meth:`~unittest.TestCase.addClassCleanup` were not called if ``tearDownClass`` is set to ``None``.
+ * Buffering in :class:`~unittest.TestResult` did not work with functions registered with ``addClassCleanup()`` and ``addModuleCleanup()``.
+ * Errors in functions registered with ``addClassCleanup()`` and ``addModuleCleanup()`` were not handled correctly in buffered and debug modes.
+ * Errors in ``setUpModule()`` and functions registered with ``addModuleCleanup()`` were reported in wrong order.
+ * And several lesser bugs.
+
+- bpo-45001: Made email date parsing more robust against malformed input,
+ namely a whitespace-only ``Date:`` header. Patch by Wouter Bolsterlee.
+
+- bpo-44449: Fix a crash in the signal handler of the :mod:`faulthandler`
+ module: no longer modify the reference count of frame objects. Patch by
+ Victor Stinner.
+
+- bpo-44955: Method :meth:`~unittest.TestResult.stopTestRun` is now always
+  called together with method :meth:`~unittest.TestResult.startTestRun` for
+ :class:`~unittest.TestResult` objects implicitly created in
+ :meth:`~unittest.TestCase.run`. Previously it was not called for test
+ methods and classes decorated with a skipping decorator.
+
+- bpo-38956: :class:`argparse.BooleanOptionalAction`'s default value is no
+ longer printed twice when used with
+ :class:`argparse.ArgumentDefaultsHelpFormatter`.
+
+- bpo-44581: Upgrade bundled pip to 21.2.3 and setuptools to 57.4.0
+
+- bpo-44849: Fix the :func:`os.set_inheritable` function on FreeBSD 14 for
+ file descriptor opened with the :data:`~os.O_PATH` flag: ignore the
+ :data:`~errno.EBADF` error on ``ioctl()``, fallback on the ``fcntl()``
+ implementation. Patch by Victor Stinner.
+
+- bpo-44605: The @functools.total_ordering() decorator now works with
+ metaclasses.
+
+- bpo-44822: :mod:`sqlite3` user-defined functions and aggregators returning
+ :class:`strings <str>` with embedded NUL characters are no longer
+ truncated. Patch by Erlend E. Aasland.
+
+- bpo-44815: Always show ``loop=`` arg deprecations in
+ :func:`asyncio.gather` and :func:`asyncio.sleep`
+
+- bpo-44806: Non-protocol subclasses of :class:`typing.Protocol` now ignore
+  the ``__init__`` method inherited from protocol base classes.
+
+- bpo-44667: The :func:`tokenize.tokenize` doesn't incorrectly generate a
+ ``NEWLINE`` token if the source doesn't end with a new line character but
+ the last line is a comment, as the function is already generating a ``NL``
+ token. Patch by Pablo Galindo
+
+- bpo-42853: Fix ``http.client.HTTPSConnection`` failing to download more
+  than 2 GiB of data.
+
+- bpo-44752: :mod:`rlcompleter` does not call :func:`getattr` on
+ :class:`property` objects to avoid the side-effect of evaluating the
+ corresponding method.
+
+- bpo-44720: ``weakref.proxy`` objects referencing non-iterators now raise
+ ``TypeError`` rather than dereferencing the null ``tp_iternext`` slot and
+ crashing.
+
+- bpo-44704: The implementation of ``collections.abc.Set._hash()`` now
+ matches that of ``frozenset.__hash__()``.
+
+- bpo-44666: Fixed issue in :func:`compileall.compile_file` when
+ ``sys.stdout`` is redirected. Patch by Stefan Hölzl.
+
+- bpo-40897: Give priority to using the current class constructor in
+ :func:`inspect.signature`. Patch by Weipeng Hong.
+
+- bpo-44608: Fix memory leak in :func:`_tkinter._flatten` if it is called
+ with a sequence or set, but not list or tuple.
+
+- bpo-41928: Update :func:`shutil.copyfile` to raise
+ :exc:`FileNotFoundError` instead of confusing :exc:`IsADirectoryError`
+ when a path ending with a :const:`os.path.sep` does not exist;
+ :func:`shutil.copy` and :func:`shutil.copy2` are also affected.
+
+- bpo-44566: Handle StopIteration subclass raised from
+ @contextlib.contextmanager generator
+
+- bpo-44558: Make the implementation of :func:`~operator.indexOf`
+  consistent between the C and Python versions. Patch by Dong-hee
+  Na.
+
+- bpo-41249: Fixes ``TypedDict`` to work with ``typing.get_type_hints()``
+ and postponed evaluation of annotations across modules.
+
+- bpo-44461: Fix bug with :mod:`pdb`'s handling of import error due to a
+ package which does not have a ``__main__`` module
+
+- bpo-42892: Fixed an exception thrown while parsing a malformed multipart
+ email by :class:`email.message.EmailMessage`.
+
+- bpo-27827: :meth:`pathlib.PureWindowsPath.is_reserved` now identifies a
+ greater range of reserved filenames, including those with trailing spaces
+ or colons.
+
+- bpo-34266: Handle exceptions from parsing the arg of :mod:`pdb`'s
+ run/restart command.
+
+- bpo-27334: The :mod:`sqlite3` context manager now performs a rollback
+ (thus releasing the database lock) if commit failed. Patch by Luca Citi
+ and Erlend E. Aasland.
+
+- bpo-43853: Improved string handling for :mod:`sqlite3` user-defined
+ functions and aggregates:
+
+ * It is now possible to pass strings with embedded null characters to UDFs
+ * Conversion failures now correctly raise :exc:`MemoryError`
+
+ Patch by Erlend E. Aasland.
+
+- bpo-43048: Handle :exc:`RecursionError` in
+ :class:`~traceback.TracebackException`'s constructor, so that long
+ exceptions chains are truncated instead of causing traceback formatting to
+ fail.
+
+- bpo-41402: Fix :meth:`email.message.EmailMessage.set_content` when called
+ with binary data and ``7bit`` content transfer encoding.
+
+- bpo-32695: The *compresslevel* and *preset* keyword arguments of
+ :func:`tarfile.open` are now both documented and tested.
+
+- bpo-34990: Fixed a Y2k38 bug in the compileall module where it would fail
+ to compile files with a modification time after the year 2038.
+
+- bpo-38840: Fix ``test___all__`` on platforms lacking a shared memory
+ implementation.
+
+- bpo-30256: Pass multiprocessing BaseProxy argument ``manager_owned``
+ through AutoProxy.
+
+- bpo-27513: :func:`email.utils.getaddresses` now accepts
+ :class:`email.header.Header` objects along with string values. Patch by
+ Zackery Spytz.
+
+- bpo-33349: lib2to3 now recognizes async generators everywhere.
+
+- bpo-29298: Fix ``TypeError`` when required subparsers without ``dest`` do
+ not receive arguments. Patch by Anthony Sottile.
+
+Documentation
+-------------
+
+- bpo-44903: Removed the othergui.rst file, any references to it, and the
+ list of GUI frameworks in the FAQ. In their place I've added links to the
+ Python Wiki `page on GUI frameworks
+  <https://wiki.python.org/moin/GuiProgramming>`_.
+
+- bpo-44756: Reverted automated virtual environment creation on ``make
+ html`` when building documentation. It turned out to be disruptive for
+ downstream distributors.
+
+- bpo-44693: Update the definition of __future__ in the glossary by
+ replacing the confusing word "pseudo-module" with a more accurate
+ description.
+
+- bpo-35183: Add typical examples to os.path.splitext docs
+
+- bpo-30511: Clarify that :func:`shutil.make_archive` is not thread-safe due
+ to reliance on changing the current working directory.
+
+- bpo-44561: Update of three expired hyperlinks in
+ Doc/distributing/index.rst: "Project structure", "Building and packaging
+ the project", and "Uploading the project to the Python Packaging Index".
+
+- bpo-42958: Updated the docstring and docs of :func:`filecmp.cmp` to be
+ more accurate and less confusing especially in respect to *shallow* arg.
+
+- bpo-44558: Match the docstring and python implementation of
+  :func:`~operator.countOf` to the behavior of its C implementation.
+
+- bpo-44544: List all kwargs for :func:`textwrap.wrap`,
+ :func:`textwrap.fill`, and :func:`textwrap.shorten`. Now, there are nav
+  links to attributes of :class:`~textwrap.TextWrapper`, which makes navigation much
+ easier while minimizing duplication in the documentation.
+
+- bpo-38062: Clarify that atexit uses equality comparisons internally.
+
+- bpo-43066: Added a warning to :mod:`zipfile` docs: filename arg with a
+ leading slash may cause archive to be un-openable on Windows systems.
+
+- bpo-27752: Documentation of csv.Dialect is more descriptive.
+
+- bpo-44453: Fix documentation for the return type of
+ :func:`sysconfig.get_path`.
+
+- bpo-39498: Add a "Security Considerations" index which links to standard
+ library modules that have explicitly documented security considerations.
+
+- bpo-33479: Remove the unqualified claim that tkinter is threadsafe. It has
+ not been true for several years and likely never was. An explanation of
+ what is true may be added later, after more discussion, and possibly after
+  patching _tkinter.c.
+
+Tests
+-----
+
+- bpo-25130: Add calls of :func:`gc.collect` in tests to support PyPy.
+
+- bpo-45011: Made tests relying on the :mod:`_asyncio` C extension module
+ optional to allow running on alternative Python implementations. Patch by
+ Serhiy Storchaka.
+
+- bpo-44949: Fix auto history tests of test_readline: sometimes, the newline
+ character is not written at the end, so don't expect it in the output.
+
+- bpo-44852: Add ability to wholesale silence DeprecationWarnings while
+ running the regression test suite.
+
+- bpo-40928: Notify users running test_decimal regression tests on macOS of
+ potential harmless "malloc can't allocate region" messages spewed by
+ test_decimal.
+
+- bpo-44734: Fixed floating point precision issue in turtle tests.
+
+- bpo-44708: Regression tests, when run with -w, are now re-running only the
+ affected test methods instead of re-running the entire test file.
+
+- bpo-30256: Add test for nested queues when using ``multiprocessing``
+ shared objects ``AutoProxy[Queue]`` inside ``ListProxy`` and ``DictProxy``
+
+Build
+-----
+
+- bpo-44535: Enable building using a Visual Studio 2022 install on Windows.
+
+- bpo-43298: Improved error message when building without a Windows SDK
+ installed.
+
+Windows
+-------
+
+- bpo-45007: Update to OpenSSL 1.1.1l in Windows build
+
+- bpo-44572: Avoid consuming standard input in the :mod:`platform` module
+
+- bpo-40263: This is a follow-on bug from
+ https://bugs.python.org/issue26903. Once that is applied we run into an
+ off-by-one assertion problem. The assert was not correct.
+
+macOS
+-----
+
+- bpo-45007: Update macOS installer builds to use OpenSSL 1.1.1l.
+
+- bpo-44689: :meth:`ctypes.util.find_library` now works correctly on macOS
+ 11 Big Sur even if Python is built on an older version of macOS.
+ Previously, when built on older macOS systems, ``find_library`` was not
+ able to find macOS system libraries when running on Big Sur due to
+ changes in how system libraries are stored.
+
+Tools/Demos
+-----------
+
+- bpo-44756: In the Makefile for documentation (:file:`Doc/Makefile`), the
+ ``build`` rule is dependent on the ``venv`` rule. Therefore, ``html``,
+ ``latex``, and other build-dependent rules are also now dependent on
+ ``venv``. The ``venv`` rule only performs an action if ``$(VENVDIR)`` does
+ not exist. :file:`Doc/README.rst` was updated; most users now only need to
+ type ``make html``.
+
+
What's New in Python 3.9.6 final?
=================================
Py_XINCREF(skipinitialspace);
Py_XINCREF(strict);
if (dialect != NULL) {
-#define DIALECT_GETATTR(v, n) \
- if (v == NULL) \
- v = PyObject_GetAttrString(dialect, n)
+#define DIALECT_GETATTR(v, n) \
+ do { \
+ if (v == NULL) { \
+ v = PyObject_GetAttrString(dialect, n); \
+ if (v == NULL) \
+ PyErr_Clear(); \
+ } \
+ } while (0)
DIALECT_GETATTR(delimiter, "delimiter");
DIALECT_GETATTR(doublequote, "doublequote");
DIALECT_GETATTR(escapechar, "escapechar");
DIALECT_GETATTR(quoting, "quoting");
DIALECT_GETATTR(skipinitialspace, "skipinitialspace");
DIALECT_GETATTR(strict, "strict");
- PyErr_Clear();
}
/* check types and convert to C values */
return r;
}
#else
-
+#ifdef __APPLE__
#ifdef HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH
+#define HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH_RUNTIME \
+ __builtin_available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *)
+#else
+// Support the deprecated case of compiling on an older macOS version
+static void *libsystem_b_handle;
+static bool (*_dyld_shared_cache_contains_path)(const char *path);
+
+__attribute__((constructor)) void load_dyld_shared_cache_contains_path(void) {
+ libsystem_b_handle = dlopen("/usr/lib/libSystem.B.dylib", RTLD_LAZY);
+ if (libsystem_b_handle != NULL) {
+ _dyld_shared_cache_contains_path = dlsym(libsystem_b_handle, "_dyld_shared_cache_contains_path");
+ }
+}
+
+__attribute__((destructor)) void unload_dyld_shared_cache_contains_path(void) {
+ if (libsystem_b_handle != NULL) {
+ dlclose(libsystem_b_handle);
+ }
+}
+#define HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH_RUNTIME \
+ _dyld_shared_cache_contains_path != NULL
+#endif
+
static PyObject *py_dyld_shared_cache_contains_path(PyObject *self, PyObject *args)
{
PyObject *name, *name2;
char *name_str;
- if (__builtin_available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *)) {
+ if (HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH_RUNTIME) {
int r;
if (!PyArg_ParseTuple(args, "O", &name))
{"dlclose", py_dl_close, METH_VARARGS, "dlclose a library"},
{"dlsym", py_dl_sym, METH_VARARGS, "find symbol in shared library"},
#endif
-#ifdef HAVE_DYLD_SHARED_CACHE_CONTAINS_PATH
+#ifdef __APPLE__
{"_dyld_shared_cache_contains_path", py_dyld_shared_cache_contains_path, METH_VARARGS, "check if path is in the shared cache"},
#endif
{"alignment", align_func, METH_O, alignment_doc},
A number of SysV or ncurses functions don't have wrappers yet; if you
need a given function, add it and send a patch. See
- http://www.python.org/dev/patches/ for instructions on how to submit
+ https://www.python.org/dev/patches/ for instructions on how to submit
patches to Python.
Here's a list of currently unsupported functions:
/*--------------------------------------------------------------------
* Licensed to PSF under a Contributor Agreement.
- * See http://www.python.org/psf/license for licensing details.
+ * See https://www.python.org/psf/license for licensing details.
*
* _elementtree - C accelerator for xml.etree.ElementTree
* Copyright (c) 1999-2009 by Secret Labs AB. All rights reserved.
/*[clinic input]
_operator.countOf = _operator.indexOf
-Return the number of times b occurs in a.
+Return the number of items in a which are, or which equal, b.
[clinic start generated code]*/
static Py_ssize_t
_operator_countOf_impl(PyObject *module, PyObject *a, PyObject *b)
-/*[clinic end generated code: output=9e1623197daf3382 input=0c3a2656add252db]*/
+/*[clinic end generated code: output=9e1623197daf3382 input=93ea57f170f3f0bb]*/
{
return PySequence_Count(a, b);
}
database = PyBytes_AsString(database_obj);
- self->initialized = 1;
-
self->begin_statement = NULL;
Py_CLEAR(self->statement_cache);
Py_INCREF(isolation_level);
}
Py_CLEAR(self->isolation_level);
- if (pysqlite_connection_set_isolation_level(self, isolation_level, NULL) < 0) {
+ if (pysqlite_connection_set_isolation_level(self, isolation_level, NULL) != 0) {
Py_DECREF(isolation_level);
return -1;
}
self->ProgrammingError = pysqlite_ProgrammingError;
self->NotSupportedError = pysqlite_NotSupportedError;
+ self->initialized = 1;
+
return 0;
}
return NULL;
}
+ if (!self->initialized) {
+ PyErr_SetString(pysqlite_ProgrammingError,
+ "Base Connection.__init__ not called.");
+ return NULL;
+ }
+
pysqlite_do_all_statements(self, ACTION_FINALIZE, 1);
if (self->db) {
} else if (PyFloat_Check(py_val)) {
sqlite3_result_double(context, PyFloat_AsDouble(py_val));
} else if (PyUnicode_Check(py_val)) {
- const char *str = PyUnicode_AsUTF8(py_val);
- if (str == NULL)
+ Py_ssize_t sz;
+ const char *str = PyUnicode_AsUTF8AndSize(py_val, &sz);
+ if (str == NULL) {
+ return -1;
+ }
+ if (sz > INT_MAX) {
+ PyErr_SetString(PyExc_OverflowError,
+ "string is longer than INT_MAX bytes");
return -1;
- sqlite3_result_text(context, str, -1, SQLITE_TRANSIENT);
+ }
+ sqlite3_result_text(context, str, (int)sz, SQLITE_TRANSIENT);
} else if (PyObject_CheckBuffer(py_val)) {
Py_buffer view;
if (PyObject_GetBuffer(py_val, &view, PyBUF_SIMPLE) != 0) {
int i;
sqlite3_value* cur_value;
PyObject* cur_py_value;
- const char* val_str;
Py_ssize_t buflen;
args = PyTuple_New(argc);
case SQLITE_FLOAT:
cur_py_value = PyFloat_FromDouble(sqlite3_value_double(cur_value));
break;
- case SQLITE_TEXT:
- val_str = (const char*)sqlite3_value_text(cur_value);
- cur_py_value = PyUnicode_FromString(val_str);
- /* TODO: have a way to show errors here */
- if (!cur_py_value) {
- PyErr_Clear();
- Py_INCREF(Py_None);
- cur_py_value = Py_None;
+ case SQLITE_TEXT: {
+ sqlite3 *db = sqlite3_context_db_handle(context);
+ const char *text = (const char *)sqlite3_value_text(cur_value);
+
+ if (text == NULL && sqlite3_errcode(db) == SQLITE_NOMEM) {
+ PyErr_NoMemory();
+ goto error;
}
+
+ Py_ssize_t size = sqlite3_value_bytes(cur_value);
+ cur_py_value = PyUnicode_FromStringAndSize(text, size);
break;
+ }
case SQLITE_BLOB:
buflen = sqlite3_value_bytes(cur_value);
cur_py_value = PyBytes_FromStringAndSize(
}
if (!cur_py_value) {
- Py_DECREF(args);
- return NULL;
+ goto error;
}
PyTuple_SetItem(args, i, cur_py_value);
}
return args;
+
+error:
+ Py_DECREF(args);
+ return NULL;
}
void _pysqlite_func_callback(sqlite3_context* context, int argc, sqlite3_value** argv)
static PyObject* pysqlite_connection_get_isolation_level(pysqlite_Connection* self, void* unused)
{
+ if (!pysqlite_check_connection(self)) {
+ return NULL;
+ }
Py_INCREF(self->isolation_level);
return self->isolation_level;
}
return -1;
}
if (isolation_level == Py_None) {
- PyObject *res = pysqlite_connection_commit(self, NULL);
- if (!res) {
- return -1;
+ /* We might get called during connection init, so we cannot use
+ * pysqlite_connection_commit() here. */
+ if (self->db && !sqlite3_get_autocommit(self->db)) {
+ int rc;
+ Py_BEGIN_ALLOW_THREADS
+ rc = sqlite3_exec(self->db, "COMMIT", NULL, NULL, NULL);
+ Py_END_ALLOW_THREADS
+ if (rc != SQLITE_OK) {
+ return _pysqlite_seterror(self->db, NULL);
+ }
}
- Py_DECREF(res);
self->begin_statement = NULL;
} else {
static PyObject *
pysqlite_connection_exit(pysqlite_Connection* self, PyObject* args)
{
- PyObject* exc_type, *exc_value, *exc_tb;
- const char* method_name;
- PyObject* result;
-
+ PyObject *exc_type, *exc_value, *exc_tb;
if (!PyArg_ParseTuple(args, "OOO", &exc_type, &exc_value, &exc_tb)) {
return NULL;
}
+ int commit = 0;
+ PyObject *result;
if (exc_type == Py_None && exc_value == Py_None && exc_tb == Py_None) {
- method_name = "commit";
- } else {
- method_name = "rollback";
+ commit = 1;
+ result = pysqlite_connection_commit(self, NULL);
}
-
- result = PyObject_CallMethod((PyObject*)self, method_name, NULL);
- if (!result) {
+ else {
+ result = pysqlite_connection_rollback(self, NULL);
+ }
+
+ if (result == NULL) {
+ if (commit) {
+ /* Commit failed; try to rollback in order to unlock the database.
+ * If rollback also fails, chain the exceptions. */
+ PyObject *exc, *val, *tb;
+ PyErr_Fetch(&exc, &val, &tb);
+ result = pysqlite_connection_rollback(self, NULL);
+ if (result == NULL) {
+ _PyErr_ChainExceptions(exc, val, tb);
+ }
+ else {
+ Py_DECREF(result);
+ PyErr_Restore(exc, val, tb);
+ }
+ }
return NULL;
}
Py_DECREF(result);
goto error;
}
- /* validata cadata type and load cadata */
+ /* validate cadata type and load cadata */
if (cadata) {
if (PyUnicode_Check(cadata)) {
PyObject *cadata_ascii = PyUnicode_AsASCIIString(cadata);
Py_RETURN_NONE;
}
-/* Example passing NULLs to PyObject_Str(NULL). */
+static PyObject *
+pyobject_repr_from_null(PyObject *self, PyObject *Py_UNUSED(ignored))
+{
+ return PyObject_Repr(NULL);
+}
static PyObject *
-test_null_strings(PyObject *self, PyObject *Py_UNUSED(ignored))
+pyobject_str_from_null(PyObject *self, PyObject *Py_UNUSED(ignored))
{
- PyObject *o1 = PyObject_Str(NULL), *o2 = PyObject_Str(NULL);
- PyObject *tuple = PyTuple_Pack(2, o1, o2);
- Py_XDECREF(o1);
- Py_XDECREF(o2);
- return tuple;
+ return PyObject_Str(NULL);
+}
+
+static PyObject *
+pyobject_bytes_from_null(PyObject *self, PyObject *Py_UNUSED(ignored))
+{
+ return PyObject_Bytes(NULL);
}
static PyObject *
{"test_k_code", test_k_code, METH_NOARGS},
{"test_empty_argparse", test_empty_argparse, METH_NOARGS},
{"parse_tuple_and_keywords", parse_tuple_and_keywords, METH_VARARGS},
- {"test_null_strings", test_null_strings, METH_NOARGS},
+ {"pyobject_repr_from_null", pyobject_repr_from_null, METH_NOARGS},
+ {"pyobject_str_from_null", pyobject_str_from_null, METH_NOARGS},
+ {"pyobject_bytes_from_null", pyobject_bytes_from_null, METH_NOARGS},
{"test_string_from_format", (PyCFunction)test_string_from_format, METH_NOARGS},
{"test_with_docstring", test_with_docstring, METH_NOARGS,
PyDoc_STR("This is a pretty normal docstring.")},
context.size = 0;
- if (!_flatten1(&context, item,0))
+ if (!_flatten1(&context, item, 0)) {
+ Py_XDECREF(context.tuple);
return NULL;
+ }
if (_PyTuple_Resize(&context.tuple, context.size))
return NULL;
*/
/* Licensed to PSF under a Contributor Agreement. */
-/* See http://www.python.org/2.4/license for licensing details. */
+/* See https://www.python.org/2.4/license for licensing details. */
#include "Python.h"
#include "structmember.h" // PyMemberDef
nhandles = PySequence_Length(handle_seq);
if (nhandles == -1)
return NULL;
- if (nhandles < 0 || nhandles >= MAXIMUM_WAIT_OBJECTS - 1) {
+ if (nhandles < 0 || nhandles > MAXIMUM_WAIT_OBJECTS - 1) {
PyErr_Format(PyExc_ValueError,
"need at most %zd handles, got a sequence of length %zd",
MAXIMUM_WAIT_OBJECTS - 1, nhandles);
"countOf($module, a, b, /)\n"
"--\n"
"\n"
-"Return the number of times b occurs in a.");
+"Return the number of items in a which are, or which equal, b.");
#define _OPERATOR_COUNTOF_METHODDEF \
{"countOf", (PyCFunction)(void(*)(void))_operator_countOf, METH_FASTCALL, _operator_countOf__doc__},
exit:
return return_value;
}
-/*[clinic end generated code: output=e7ed71a8c475a901 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=ed2bc172e42320d8 input=a9049054013a1b77]*/
Copyright (c) 1998-2000 Thai Open Source Software Center Ltd and Clark Cooper
-Copyright (c) 2001-2017 Expat maintainers
+Copyright (c) 2001-2019 Expat maintainers
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
\___/_/\_\ .__/ \__,_|\__|
|_| XML parser
- Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 1999-2000 Thai Open Source Software Center Ltd
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2002 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2007 Karl Waclawek <karl@waclawek.net>
+ Copyright (c) 2017 Sebastian Pipping <sebastian@pipping.org>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2002 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2017 Sebastian Pipping <sebastian@pipping.org>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2000-2005 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2001-2002 Greg Stein <gstein@users.sourceforge.net>
+ Copyright (c) 2002-2016 Karl Waclawek <karl@waclawek.net>
+ Copyright (c) 2016-2021 Sebastian Pipping <sebastian@pipping.org>
+  Copyright (c) 2016 Cristian Rodríguez <crrodriguez@opensuse.org>
+ Copyright (c) 2016 Thomas Beutlich <tc@tbeu.de>
+ Copyright (c) 2017 Rhodri James <rhodri@wildebeest.org.uk>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
XML_ERROR_RESERVED_PREFIX_XMLNS,
XML_ERROR_RESERVED_NAMESPACE_URI,
/* Added in 2.2.1. */
- XML_ERROR_INVALID_ARGUMENT
+ XML_ERROR_INVALID_ARGUMENT,
+ /* Added in 2.3.0. */
+ XML_ERROR_NO_BUFFER,
+ /* Added in 2.4.0. */
+ XML_ERROR_AMPLIFICATION_LIMIT_BREACH
};
enum XML_Content_Type {
For internal entities (<!ENTITY foo "bar">), value will
be non-NULL and systemId, publicID, and notationName will be NULL.
- The value string is NOT nul-terminated; the length is provided in
+ The value string is NOT null-terminated; the length is provided in
the value_length argument. Since it is legal to have zero-length
values, do not use this argument to test for internal entities.
Otherwise it must return XML_STATUS_ERROR.
If info does not describe a suitable encoding, then the parser will
- return an XML_UNKNOWN_ENCODING error.
+ return an XML_ERROR_UNKNOWN_ENCODING error.
*/
typedef int(XMLCALL *XML_UnknownEncodingHandler)(void *encodingHandlerData,
const XML_Char *name,
/* Returns the number of the attribute/value pairs passed in last call
to the XML_StartElementHandler that were specified in the start-tag
rather than defaulted. Each attribute/value pair counts as 2; thus
- this correspondds to an index into the atts array passed to the
+ this corresponds to an index into the atts array passed to the
XML_StartElementHandler. Returns -1 if parser == NULL.
*/
XMLPARSEAPI(int)
/* Returns the index of the ID attribute passed in the last call to
XML_StartElementHandler, or -1 if there is no ID attribute or
parser == NULL. Each attribute/value pair counts as 2; thus this
- correspondds to an index into the atts array passed to the
+ corresponds to an index into the atts array passed to the
XML_StartElementHandler.
*/
XMLPARSEAPI(int)
XML_FEATURE_SIZEOF_XML_LCHAR,
XML_FEATURE_NS,
XML_FEATURE_LARGE_SIZE,
- XML_FEATURE_ATTR_INFO
+ XML_FEATURE_ATTR_INFO,
+ /* Added in Expat 2.4.0. */
+ XML_FEATURE_BILLION_LAUGHS_ATTACK_PROTECTION_MAXIMUM_AMPLIFICATION_DEFAULT,
+ XML_FEATURE_BILLION_LAUGHS_ATTACK_PROTECTION_ACTIVATION_THRESHOLD_DEFAULT
/* Additional features must be added to the end of this enum. */
};
XMLPARSEAPI(const XML_Feature *)
XML_GetFeatureList(void);
+#ifdef XML_DTD
+/* Added in Expat 2.4.0. */
+XMLPARSEAPI(XML_Bool)
+XML_SetBillionLaughsAttackProtectionMaximumAmplification(
+ XML_Parser parser, float maximumAmplificationFactor);
+
+/* Added in Expat 2.4.0. */
+XMLPARSEAPI(XML_Bool)
+XML_SetBillionLaughsAttackProtectionActivationThreshold(
+ XML_Parser parser, unsigned long long activationThresholdBytes);
+#endif
+
/* Expat follows the semantic versioning convention.
See http://semver.org.
*/
#define XML_MAJOR_VERSION 2
-#define XML_MINOR_VERSION 2
-#define XML_MICRO_VERSION 8
+#define XML_MINOR_VERSION 4
+#define XML_MICRO_VERSION 1
#ifdef __cplusplus
}
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2000-2004 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2001-2002 Greg Stein <gstein@users.sourceforge.net>
+ Copyright (c) 2002-2006 Karl Waclawek <karl@waclawek.net>
+  Copyright (c) 2016 Cristian Rodríguez <crrodriguez@opensuse.org>
+ Copyright (c) 2016-2019 Sebastian Pipping <sebastian@pipping.org>
+ Copyright (c) 2017 Rhodri James <rhodri@wildebeest.org.uk>
+ Copyright (c) 2018 Yury Gribov <tetra2005@gmail.com>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2002 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2017 Sebastian Pipping <sebastian@pipping.org>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
\___/_/\_\ .__/ \__,_|\__|
|_| XML parser
- Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2002-2003 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2002-2006 Karl Waclawek <karl@waclawek.net>
+ Copyright (c) 2003 Greg Stein <gstein@users.sourceforge.net>
+ Copyright (c) 2016-2021 Sebastian Pipping <sebastian@pipping.org>
+ Copyright (c) 2018 Yury Gribov <tetra2005@gmail.com>
+ Copyright (c) 2019 David Loffredo <loffredo@steptools.com>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
# endif
#endif
+#include <limits.h> // ULONG_MAX
+
+#if defined(_WIN32) && ! defined(__USE_MINGW_ANSI_STDIO)
+# define EXPAT_FMT_ULL(midpart) "%" midpart "I64u"
+# if defined(_WIN64) // Note: modifiers "td" and "zu" do not work for MinGW
+# define EXPAT_FMT_PTRDIFF_T(midpart) "%" midpart "I64d"
+# define EXPAT_FMT_SIZE_T(midpart) "%" midpart "I64u"
+# else
+# define EXPAT_FMT_PTRDIFF_T(midpart) "%" midpart "d"
+# define EXPAT_FMT_SIZE_T(midpart) "%" midpart "u"
+# endif
+#else
+# define EXPAT_FMT_ULL(midpart) "%" midpart "llu"
+# if ! defined(ULONG_MAX)
+# error Compiler did not define ULONG_MAX for us
+# elif ULONG_MAX == 18446744073709551615u // 2^64-1
+# define EXPAT_FMT_PTRDIFF_T(midpart) "%" midpart "ld"
+# define EXPAT_FMT_SIZE_T(midpart) "%" midpart "lu"
+# else
+# define EXPAT_FMT_PTRDIFF_T(midpart) "%" midpart "d"
+# define EXPAT_FMT_SIZE_T(midpart) "%" midpart "u"
+# endif
+#endif
+
#ifndef UNUSED_P
# define UNUSED_P(p) (void)p
#endif
+/* NOTE BEGIN If you ever patch these defaults to greater values
+ for non-attack XML payload in your environment,
+ please file a bug report with libexpat. Thank you!
+*/
+#define EXPAT_BILLION_LAUGHS_ATTACK_PROTECTION_MAXIMUM_AMPLIFICATION_DEFAULT \
+ 100.0f
+#define EXPAT_BILLION_LAUGHS_ATTACK_PROTECTION_ACTIVATION_THRESHOLD_DEFAULT \
+ 8388608 // 8 MiB, 2^23
+/* NOTE END */
+
+#include "expat.h" // so we can use type XML_Parser below
+
#ifdef __cplusplus
extern "C" {
#endif
-#ifdef XML_ENABLE_VISIBILITY
-# if XML_ENABLE_VISIBILITY
-__attribute__((visibility("default")))
-# endif
+void _INTERNAL_trim_to_complete_utf8_characters(const char *from,
+ const char **fromLimRef);
+
+#if defined(XML_DTD)
+unsigned long long testingAccountingGetCountBytesDirect(XML_Parser parser);
+unsigned long long testingAccountingGetCountBytesIndirect(XML_Parser parser);
+const char *unsignedCharToPrintable(unsigned char c);
#endif
-void
-_INTERNAL_trim_to_complete_utf8_characters(const char *from,
- const char **fromLimRef);
#ifdef __cplusplus
}
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2002 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2017 Sebastian Pipping <sebastian@pipping.org>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
\___/_/\_\ .__/ \__,_|\__|
|_| XML parser
- Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2017 Sebastian Pipping <sebastian@pipping.org>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
* --------------------------------------------------------------------------
* HISTORY:
*
+ * 2020-10-03 (Sebastian Pipping)
+ * - Drop support for Visual Studio 9.0/2008 and earlier
+ *
* 2019-08-03 (Sebastian Pipping)
* - Mark part of sip24_valid as to be excluded from clang-format
* - Re-format code using clang-format 9
#define SIPHASH_H
#include <stddef.h> /* size_t */
-
-#if defined(_WIN32) && defined(_MSC_VER) && (_MSC_VER < 1600)
-/* For vs2003/7.1 up to vs2008/9.0; _MSC_VER 1600 is vs2010/10.0 */
-typedef unsigned __int8 uint8_t;
-typedef unsigned __int32 uint32_t;
-typedef unsigned __int64 uint64_t;
-#else
-# include <stdint.h> /* uint64_t uint32_t uint8_t */
-#endif
+#include <stdint.h> /* uint64_t uint32_t uint8_t */
/*
* Workaround to not require a C++11 compiler for using ULL suffix
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2002 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2017 Sebastian Pipping <sebastian@pipping.org>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
\___/_/\_\ .__/ \__,_|\__|
|_| XML parser
- Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2002 Greg Stein <gstein@users.sourceforge.net>
+ Copyright (c) 2005 Karl Waclawek <karl@waclawek.net>
+ Copyright (c) 2017-2021 Sebastian Pipping <sebastian@pipping.org>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
#include <memory.h>
#include <string.h>
-#if defined(HAVE_EXPAT_CONFIG_H) /* e.g. MinGW */
-# include <expat_config.h>
-#else /* !defined(HAVE_EXPAT_CONFIG_H) */
-
-# define XML_NS 1
-# define XML_DTD 1
-# define XML_CONTEXT_BYTES 1024
-
-/* we will assume all Windows platforms are little endian */
-# define BYTEORDER 1234
-
-#endif /* !defined(HAVE_EXPAT_CONFIG_H) */
-
#endif /* ndef WINCONFIG_H */
-/* f2d0ab6d1d4422a08cf1cf3bbdfba96b49dea42fb5ff4615e03a2a25c306e769 (2.2.8+)
+/* 8539b9040d9d901366a62560a064af7cb99811335784b363abc039c5b0ebc416 (2.4.1+)
__ __ _
___\ \/ /_ __ __ _| |_
/ _ \\ /| '_ \ / _` | __|
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2000-2006 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2001-2002 Greg Stein <gstein@users.sourceforge.net>
+ Copyright (c) 2002-2016 Karl Waclawek <karl@waclawek.net>
+ Copyright (c) 2005-2009 Steven Solie <ssolie@users.sourceforge.net>
+ Copyright (c) 2016 Eric Rahm <erahm@mozilla.com>
+ Copyright (c) 2016-2021 Sebastian Pipping <sebastian@pipping.org>
+ Copyright (c) 2016 Gaurav <g.gupta@samsung.com>
+ Copyright (c) 2016 Thomas Beutlich <tc@tbeu.de>
+ Copyright (c) 2016 Gustavo Grieco <gustavo.grieco@imag.fr>
+ Copyright (c) 2016 Pascal Cuoq <cuoq@trust-in-soft.com>
+ Copyright (c) 2016 Ed Schouten <ed@nuxi.nl>
+ Copyright (c) 2017-2018 Rhodri James <rhodri@wildebeest.org.uk>
+ Copyright (c) 2017 Václav Slavík <vaclav@slavik.io>
+ Copyright (c) 2017 Viktor Szakats <commit@vsz.me>
+ Copyright (c) 2017 Chanho Park <chanho61.park@samsung.com>
+ Copyright (c) 2017 Rolf Eike Beer <eike@sf-mail.de>
+ Copyright (c) 2017 Hans Wennborg <hans@chromium.org>
+ Copyright (c) 2018 Anton Maklakov <antmak.pub@gmail.com>
+ Copyright (c) 2018 Benjamin Peterson <benjamin@python.org>
+ Copyright (c) 2018 Marco Maggi <marco.maggi-ipsu@poste.it>
+ Copyright (c) 2018 Mariusz Zaborski <oshogbo@vexillium.org>
+ Copyright (c) 2019 David Loffredo <loffredo@steptools.com>
+ Copyright (c) 2019-2020 Ben Wagner <bungeman@chromium.org>
+ Copyright (c) 2019 Vadim Zeitlin <vadim@zeitlins.org>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
#ifdef _WIN32
/* force stdlib to define rand_s() */
-# define _CRT_RAND_S
+# if ! defined(_CRT_RAND_S)
+# define _CRT_RAND_S
+# endif
#endif
#include <stddef.h>
#include <limits.h> /* UINT_MAX */
#include <stdio.h> /* fprintf */
#include <stdlib.h> /* getenv, rand_s */
+#include <stdint.h> /* uintptr_t */
+#include <math.h> /* isnan */
#ifdef _WIN32
# define getpid GetCurrentProcessId
#ifdef _WIN32
# include "winconfig.h"
-#elif defined(HAVE_EXPAT_CONFIG_H)
-# include <expat_config.h>
-#endif /* ndef _WIN32 */
+#endif
+
+#include <expat_config.h>
#include "ascii.h"
#include "expat.h"
enabled. For end user security, that is probably not what you want. \
\
Your options include: \
- * Linux + glibc >=2.25 (getrandom): HAVE_GETRANDOM, \
- * Linux + glibc <2.25 (syscall SYS_getrandom): HAVE_SYSCALL_GETRANDOM, \
+ * Linux >=3.17 + glibc >=2.25 (getrandom): HAVE_GETRANDOM, \
+ * Linux >=3.17 + glibc (including <2.25) (syscall SYS_getrandom): HAVE_SYSCALL_GETRANDOM, \
* BSD / macOS >=10.7 (arc4random_buf): HAVE_ARC4RANDOM_BUF, \
- * BSD / macOS <10.7 (arc4random): HAVE_ARC4RANDOM, \
+ * BSD / macOS (including <10.7) (arc4random): HAVE_ARC4RANDOM, \
* libbsd (arc4random_buf): HAVE_ARC4RANDOM_BUF + HAVE_LIBBSD, \
* libbsd (arc4random): HAVE_ARC4RANDOM + HAVE_LIBBSD, \
- * Linux / BSD / macOS (/dev/urandom): XML_DEV_URANDOM \
- * Windows (rand_s): _WIN32. \
+ * Linux (including <3.17) / BSD / macOS (including <10.7) (/dev/urandom): XML_DEV_URANDOM, \
+ * Windows >=Vista (rand_s): _WIN32. \
\
If insist on not using any of these, bypass this error by defining \
XML_POOR_ENTROPY; you have been warned. \
# define XmlGetInternalEncoding XmlGetUtf16InternalEncoding
# define XmlGetInternalEncodingNS XmlGetUtf16InternalEncodingNS
# define XmlEncode XmlUtf16Encode
-/* Using pointer subtraction to convert to integer type. */
-# define MUST_CONVERT(enc, s) \
- (! (enc)->isUtf16 || (((char *)(s) - (char *)NULL) & 1))
+# define MUST_CONVERT(enc, s) (! (enc)->isUtf16 || (((uintptr_t)(s)) & 1))
typedef unsigned short ICHAR;
#else
# define XML_ENCODE_MAX XML_UTF8_ENCODE_MAX
XML_Bool betweenDecl; /* WFC: PE Between Declarations */
} OPEN_INTERNAL_ENTITY;
+enum XML_Account {
+ XML_ACCOUNT_DIRECT, /* bytes directly passed to the Expat parser */
+ XML_ACCOUNT_ENTITY_EXPANSION, /* intermediate bytes produced during entity
+ expansion */
+ XML_ACCOUNT_NONE /* i.e. do not account, was accounted already */
+};
+
+#ifdef XML_DTD
+typedef unsigned long long XmlBigCount;
+typedef struct accounting {
+ XmlBigCount countBytesDirect;
+ XmlBigCount countBytesIndirect;
+ int debugLevel;
+ float maximumAmplificationFactor; // >=1.0
+ unsigned long long activationThresholdBytes;
+} ACCOUNTING;
+
+typedef struct entity_stats {
+ unsigned int countEverOpened;
+ unsigned int currentDepth;
+ unsigned int maximumDepthSeen;
+ int debugLevel;
+} ENTITY_STATS;
+#endif /* XML_DTD */
+
typedef enum XML_Error PTRCALL Processor(XML_Parser parser, const char *start,
const char *end, const char **endPtr);
static enum XML_Error doProlog(XML_Parser parser, const ENCODING *enc,
const char *s, const char *end, int tok,
const char *next, const char **nextPtr,
- XML_Bool haveMore, XML_Bool allowClosingDoctype);
+ XML_Bool haveMore, XML_Bool allowClosingDoctype,
+ enum XML_Account account);
static enum XML_Error processInternalEntity(XML_Parser parser, ENTITY *entity,
XML_Bool betweenDecl);
static enum XML_Error doContent(XML_Parser parser, int startTagLevel,
const ENCODING *enc, const char *start,
const char *end, const char **endPtr,
- XML_Bool haveMore);
+ XML_Bool haveMore, enum XML_Account account);
static enum XML_Error doCdataSection(XML_Parser parser, const ENCODING *,
const char **startPtr, const char *end,
- const char **nextPtr, XML_Bool haveMore);
+ const char **nextPtr, XML_Bool haveMore,
+ enum XML_Account account);
#ifdef XML_DTD
static enum XML_Error doIgnoreSection(XML_Parser parser, const ENCODING *,
const char **startPtr, const char *end,
static void freeBindings(XML_Parser parser, BINDING *bindings);
static enum XML_Error storeAtts(XML_Parser parser, const ENCODING *,
const char *s, TAG_NAME *tagNamePtr,
- BINDING **bindingsPtr);
+ BINDING **bindingsPtr,
+ enum XML_Account account);
static enum XML_Error addBinding(XML_Parser parser, PREFIX *prefix,
const ATTRIBUTE_ID *attId, const XML_Char *uri,
BINDING **bindingsPtr);
XML_Parser parser);
static enum XML_Error storeAttributeValue(XML_Parser parser, const ENCODING *,
XML_Bool isCdata, const char *,
- const char *, STRING_POOL *);
+ const char *, STRING_POOL *,
+ enum XML_Account account);
static enum XML_Error appendAttributeValue(XML_Parser parser, const ENCODING *,
XML_Bool isCdata, const char *,
- const char *, STRING_POOL *);
+ const char *, STRING_POOL *,
+ enum XML_Account account);
static ATTRIBUTE_ID *getAttributeId(XML_Parser parser, const ENCODING *enc,
const char *start, const char *end);
static int setElementTypePrefix(XML_Parser parser, ELEMENT_TYPE *);
static enum XML_Error storeEntityValue(XML_Parser parser, const ENCODING *enc,
- const char *start, const char *end);
+ const char *start, const char *end,
+ enum XML_Account account);
static int reportProcessingInstruction(XML_Parser parser, const ENCODING *enc,
const char *start, const char *end);
static int reportComment(XML_Parser parser, const ENCODING *enc,
static void parserInit(XML_Parser parser, const XML_Char *encodingName);
+#ifdef XML_DTD
+static float accountingGetCurrentAmplification(XML_Parser rootParser);
+static void accountingReportStats(XML_Parser originParser, const char *epilog);
+static void accountingOnAbort(XML_Parser originParser);
+static void accountingReportDiff(XML_Parser rootParser,
+ unsigned int levelsAwayFromRootParser,
+ const char *before, const char *after,
+ ptrdiff_t bytesMore, int source_line,
+ enum XML_Account account);
+static XML_Bool accountingDiffTolerated(XML_Parser originParser, int tok,
+ const char *before, const char *after,
+ int source_line,
+ enum XML_Account account);
+
+static void entityTrackingReportStats(XML_Parser parser, ENTITY *entity,
+ const char *action, int sourceLine);
+static void entityTrackingOnOpen(XML_Parser parser, ENTITY *entity,
+ int sourceLine);
+static void entityTrackingOnClose(XML_Parser parser, ENTITY *entity,
+ int sourceLine);
+
+static XML_Parser getRootParserOf(XML_Parser parser,
+ unsigned int *outLevelDiff);
+#endif /* XML_DTD */
+
+static unsigned long getDebugLevel(const char *variableName,
+ unsigned long defaultDebugLevel);
+
#define poolStart(pool) ((pool)->start)
#define poolEnd(pool) ((pool)->ptr)
#define poolLength(pool) ((pool)->ptr - (pool)->start)
enum XML_ParamEntityParsing m_paramEntityParsing;
#endif
unsigned long m_hash_secret_salt;
+#ifdef XML_DTD
+ ACCOUNTING m_accounting;
+ ENTITY_STATS m_entity_stats;
+#endif
};
#define MALLOC(parser, s) (parser->m_mem.malloc_fcn((s)))
#ifdef _WIN32
+/* Provide declaration of rand_s() for MinGW-32 (not 64, which has it),
+ as it didn't declare it in its header prior to version 5.3.0 of its
+ runtime package (mingwrt, containing stdlib.h). The upstream fix
+ was introduced at https://osdn.net/projects/mingw/ticket/39658 . */
+# if defined(__MINGW32__) && defined(__MINGW32_VERSION) \
+ && __MINGW32_VERSION < 5003000L && ! defined(__MINGW64_VERSION_MAJOR)
+__declspec(dllimport) int rand_s(unsigned int *);
+# endif
+
/* Obtain entropy on Windows using the rand_s() function which
* generates cryptographically secure random numbers. Internally it
* uses RtlGenRandom API which is present in Windows XP and later.
static unsigned long
ENTROPY_DEBUG(const char *label, unsigned long entropy) {
- const char *const EXPAT_ENTROPY_DEBUG = getenv("EXPAT_ENTROPY_DEBUG");
- if (EXPAT_ENTROPY_DEBUG && ! strcmp(EXPAT_ENTROPY_DEBUG, "1")) {
- fprintf(stderr, "Entropy: %s --> 0x%0*lx (%lu bytes)\n", label,
+ if (getDebugLevel("EXPAT_ENTROPY_DEBUG", 0) >= 1u) {
+ fprintf(stderr, "expat: Entropy: %s --> 0x%0*lx (%lu bytes)\n", label,
(int)sizeof(entropy) * 2, entropy, (unsigned long)sizeof(entropy));
}
return entropy;
parser->m_paramEntityParsing = XML_PARAM_ENTITY_PARSING_NEVER;
#endif
parser->m_hash_secret_salt = 0;
+
+#ifdef XML_DTD
+ memset(&parser->m_accounting, 0, sizeof(ACCOUNTING));
+ parser->m_accounting.debugLevel = getDebugLevel("EXPAT_ACCOUNTING_DEBUG", 0u);
+ parser->m_accounting.maximumAmplificationFactor
+ = EXPAT_BILLION_LAUGHS_ATTACK_PROTECTION_MAXIMUM_AMPLIFICATION_DEFAULT;
+ parser->m_accounting.activationThresholdBytes
+ = EXPAT_BILLION_LAUGHS_ATTACK_PROTECTION_ACTIVATION_THRESHOLD_DEFAULT;
+
+ memset(&parser->m_entity_stats, 0, sizeof(ENTITY_STATS));
+ parser->m_entity_stats.debugLevel = getDebugLevel("EXPAT_ENTITY_DEBUG", 0u);
+#endif
}
/* moves list of bindings to m_freeBindingList */
parser->m_useForeignDTD = useDTD;
return XML_ERROR_NONE;
#else
+ UNUSED_P(useDTD);
return XML_ERROR_FEATURE_REQUIRES_XML_DTD;
#endif
}
int nLeftOver;
enum XML_Status result;
/* Detect overflow (a+b > MAX <==> b > MAX-a) */
- if (len > ((XML_Size)-1) / 2 - parser->m_parseEndByteIndex) {
+ if ((XML_Size)len > ((XML_Size)-1) / 2 - parser->m_parseEndByteIndex) {
parser->m_errorCode = XML_ERROR_NO_MEMORY;
parser->m_eventPtr = parser->m_eventEndPtr = NULL;
parser->m_processor = errorProcessor;
parser->m_errorCode = XML_ERROR_FINISHED;
return XML_STATUS_ERROR;
case XML_INITIALIZED:
+ /* Has someone called XML_GetBuffer successfully before? */
+ if (! parser->m_bufferPtr) {
+ parser->m_errorCode = XML_ERROR_NO_BUFFER;
+ return XML_STATUS_ERROR;
+ }
+
if (parser->m_parentParser == NULL && ! startParsing(parser)) {
parser->m_errorCode = XML_ERROR_NO_MEMORY;
return XML_STATUS_ERROR;
(void)offset;
(void)size;
#endif /* defined XML_CONTEXT_BYTES */
- return (char *)0;
+ return (const char *)0;
}
XML_Size XMLCALL
/* Added in 2.2.5. */
case XML_ERROR_INVALID_ARGUMENT: /* Constant added in 2.2.1, already */
return XML_L("invalid argument");
+ /* Added in 2.3.0. */
+ case XML_ERROR_NO_BUFFER:
+ return XML_L(
+ "a successful prior call to function XML_GetBuffer is required");
+ /* Added in 2.4.0. */
+ case XML_ERROR_AMPLIFICATION_LIMIT_BREACH:
+ return XML_L(
+ "limit on input amplification factor (from DTD and entities) breached");
}
return NULL;
}
const XML_Feature *XMLCALL
XML_GetFeatureList(void) {
- static const XML_Feature features[]
- = {{XML_FEATURE_SIZEOF_XML_CHAR, XML_L("sizeof(XML_Char)"),
- sizeof(XML_Char)},
- {XML_FEATURE_SIZEOF_XML_LCHAR, XML_L("sizeof(XML_LChar)"),
- sizeof(XML_LChar)},
+ static const XML_Feature features[] = {
+ {XML_FEATURE_SIZEOF_XML_CHAR, XML_L("sizeof(XML_Char)"),
+ sizeof(XML_Char)},
+ {XML_FEATURE_SIZEOF_XML_LCHAR, XML_L("sizeof(XML_LChar)"),
+ sizeof(XML_LChar)},
#ifdef XML_UNICODE
- {XML_FEATURE_UNICODE, XML_L("XML_UNICODE"), 0},
+ {XML_FEATURE_UNICODE, XML_L("XML_UNICODE"), 0},
#endif
#ifdef XML_UNICODE_WCHAR_T
- {XML_FEATURE_UNICODE_WCHAR_T, XML_L("XML_UNICODE_WCHAR_T"), 0},
+ {XML_FEATURE_UNICODE_WCHAR_T, XML_L("XML_UNICODE_WCHAR_T"), 0},
#endif
#ifdef XML_DTD
- {XML_FEATURE_DTD, XML_L("XML_DTD"), 0},
+ {XML_FEATURE_DTD, XML_L("XML_DTD"), 0},
#endif
#ifdef XML_CONTEXT_BYTES
- {XML_FEATURE_CONTEXT_BYTES, XML_L("XML_CONTEXT_BYTES"),
- XML_CONTEXT_BYTES},
+ {XML_FEATURE_CONTEXT_BYTES, XML_L("XML_CONTEXT_BYTES"),
+ XML_CONTEXT_BYTES},
#endif
#ifdef XML_MIN_SIZE
- {XML_FEATURE_MIN_SIZE, XML_L("XML_MIN_SIZE"), 0},
+ {XML_FEATURE_MIN_SIZE, XML_L("XML_MIN_SIZE"), 0},
#endif
#ifdef XML_NS
- {XML_FEATURE_NS, XML_L("XML_NS"), 0},
+ {XML_FEATURE_NS, XML_L("XML_NS"), 0},
#endif
#ifdef XML_LARGE_SIZE
- {XML_FEATURE_LARGE_SIZE, XML_L("XML_LARGE_SIZE"), 0},
+ {XML_FEATURE_LARGE_SIZE, XML_L("XML_LARGE_SIZE"), 0},
#endif
#ifdef XML_ATTR_INFO
- {XML_FEATURE_ATTR_INFO, XML_L("XML_ATTR_INFO"), 0},
+ {XML_FEATURE_ATTR_INFO, XML_L("XML_ATTR_INFO"), 0},
+#endif
+#ifdef XML_DTD
+ /* Added in Expat 2.4.0. */
+ {XML_FEATURE_BILLION_LAUGHS_ATTACK_PROTECTION_MAXIMUM_AMPLIFICATION_DEFAULT,
+ XML_L("XML_BLAP_MAX_AMP"),
+ (long int)
+ EXPAT_BILLION_LAUGHS_ATTACK_PROTECTION_MAXIMUM_AMPLIFICATION_DEFAULT},
+ {XML_FEATURE_BILLION_LAUGHS_ATTACK_PROTECTION_ACTIVATION_THRESHOLD_DEFAULT,
+ XML_L("XML_BLAP_ACT_THRES"),
+ EXPAT_BILLION_LAUGHS_ATTACK_PROTECTION_ACTIVATION_THRESHOLD_DEFAULT},
#endif
- {XML_FEATURE_END, NULL, 0}};
+ {XML_FEATURE_END, NULL, 0}};
return features;
}
+#ifdef XML_DTD
+XML_Bool XMLCALL
+XML_SetBillionLaughsAttackProtectionMaximumAmplification(
+ XML_Parser parser, float maximumAmplificationFactor) {
+ if ((parser == NULL) || (parser->m_parentParser != NULL)
+ || isnan(maximumAmplificationFactor)
+ || (maximumAmplificationFactor < 1.0f)) {
+ return XML_FALSE;
+ }
+ parser->m_accounting.maximumAmplificationFactor = maximumAmplificationFactor;
+ return XML_TRUE;
+}
+
+XML_Bool XMLCALL
+XML_SetBillionLaughsAttackProtectionActivationThreshold(
+ XML_Parser parser, unsigned long long activationThresholdBytes) {
+ if ((parser == NULL) || (parser->m_parentParser != NULL)) {
+ return XML_FALSE;
+ }
+ parser->m_accounting.activationThresholdBytes = activationThresholdBytes;
+ return XML_TRUE;
+}
+#endif /* XML_DTD */
+
/* Initially tag->rawName always points into the parse buffer;
for those TAG instances opened while the current parse buffer was
processed, and not yet closed, we need to store tag->rawName in a more
static enum XML_Error PTRCALL
contentProcessor(XML_Parser parser, const char *start, const char *end,
const char **endPtr) {
- enum XML_Error result
- = doContent(parser, 0, parser->m_encoding, start, end, endPtr,
- (XML_Bool)! parser->m_parsingStatus.finalBuffer);
+ enum XML_Error result = doContent(
+ parser, 0, parser->m_encoding, start, end, endPtr,
+ (XML_Bool)! parser->m_parsingStatus.finalBuffer, XML_ACCOUNT_DIRECT);
if (result == XML_ERROR_NONE) {
if (! storeRawNames(parser))
return XML_ERROR_NO_MEMORY;
int tok = XmlContentTok(parser->m_encoding, start, end, &next);
switch (tok) {
case XML_TOK_BOM:
+#ifdef XML_DTD
+ if (! accountingDiffTolerated(parser, tok, start, next, __LINE__,
+ XML_ACCOUNT_DIRECT)) {
+ accountingOnAbort(parser);
+ return XML_ERROR_AMPLIFICATION_LIMIT_BREACH;
+ }
+#endif /* XML_DTD */
+
/* If we are at the end of the buffer, this would cause the next stage,
i.e. externalEntityInitProcessor3, to pass control directly to
doContent (by detecting XML_TOK_NONE) without processing any xml text
const char *next = start; /* XmlContentTok doesn't always set the last arg */
parser->m_eventPtr = start;
tok = XmlContentTok(parser->m_encoding, start, end, &next);
+ /* Note: These bytes are accounted later in:
+ - processXmlDecl
+ - externalEntityContentProcessor
+ */
parser->m_eventEndPtr = next;
switch (tok) {
const char *end, const char **endPtr) {
enum XML_Error result
= doContent(parser, 1, parser->m_encoding, start, end, endPtr,
- (XML_Bool)! parser->m_parsingStatus.finalBuffer);
+ (XML_Bool)! parser->m_parsingStatus.finalBuffer,
+ XML_ACCOUNT_ENTITY_EXPANSION);
if (result == XML_ERROR_NONE) {
if (! storeRawNames(parser))
return XML_ERROR_NO_MEMORY;
static enum XML_Error
doContent(XML_Parser parser, int startTagLevel, const ENCODING *enc,
const char *s, const char *end, const char **nextPtr,
- XML_Bool haveMore) {
+ XML_Bool haveMore, enum XML_Account account) {
/* save one level of indirection */
DTD *const dtd = parser->m_dtd;
for (;;) {
const char *next = s; /* XmlContentTok doesn't always set the last arg */
int tok = XmlContentTok(enc, s, end, &next);
+#ifdef XML_DTD
+ const char *accountAfter
+ = ((tok == XML_TOK_TRAILING_RSQB) || (tok == XML_TOK_TRAILING_CR))
+ ? (haveMore ? s /* i.e. 0 bytes */ : end)
+ : next;
+ if (! accountingDiffTolerated(parser, tok, s, accountAfter, __LINE__,
+ account)) {
+ accountingOnAbort(parser);
+ return XML_ERROR_AMPLIFICATION_LIMIT_BREACH;
+ }
+#endif
*eventEndPP = next;
switch (tok) {
case XML_TOK_TRAILING_CR:
XML_Char ch = (XML_Char)XmlPredefinedEntityName(
enc, s + enc->minBytesPerChar, next - enc->minBytesPerChar);
if (ch) {
+#ifdef XML_DTD
+ /* NOTE: We are replacing 4-6 characters original input for 1 character
+ * so there is no amplification and hence recording without
+ * protection. */
+ accountingDiffTolerated(parser, tok, (char *)&ch,
+ ((char *)&ch) + sizeof(XML_Char), __LINE__,
+ XML_ACCOUNT_ENTITY_EXPANSION);
+#endif /* XML_DTD */
if (parser->m_characterDataHandler)
parser->m_characterDataHandler(parser->m_handlerArg, &ch, 1);
else if (parser->m_defaultHandler)
}
tag->name.str = (XML_Char *)tag->buf;
*toPtr = XML_T('\0');
- result = storeAtts(parser, enc, s, &(tag->name), &(tag->bindings));
+ result
+ = storeAtts(parser, enc, s, &(tag->name), &(tag->bindings), account);
if (result)
return result;
if (parser->m_startElementHandler)
if (! name.str)
return XML_ERROR_NO_MEMORY;
poolFinish(&parser->m_tempPool);
- result = storeAtts(parser, enc, s, &name, &bindings);
+ result = storeAtts(parser, enc, s, &name, &bindings,
+ XML_ACCOUNT_NONE /* token spans whole start tag */);
if (result != XML_ERROR_NONE) {
freeBindings(parser, bindings);
return result;
/* END disabled code */
else if (parser->m_defaultHandler)
reportDefault(parser, enc, s, next);
- result = doCdataSection(parser, enc, &next, end, nextPtr, haveMore);
+ result
+ = doCdataSection(parser, enc, &next, end, nextPtr, haveMore, account);
if (result != XML_ERROR_NONE)
return result;
else if (! next) {
*/
static enum XML_Error
storeAtts(XML_Parser parser, const ENCODING *enc, const char *attStr,
- TAG_NAME *tagNamePtr, BINDING **bindingsPtr) {
+ TAG_NAME *tagNamePtr, BINDING **bindingsPtr,
+ enum XML_Account account) {
DTD *const dtd = parser->m_dtd; /* save one level of indirection */
ELEMENT_TYPE *elementType;
int nDefaultAtts;
/* normalize the attribute value */
result = storeAttributeValue(
parser, enc, isCdata, parser->m_atts[i].valuePtr,
- parser->m_atts[i].valueEnd, &parser->m_tempPool);
+ parser->m_atts[i].valueEnd, &parser->m_tempPool, account);
if (result)
return result;
appAtts[attIndex] = poolStart(&parser->m_tempPool);
static enum XML_Error PTRCALL
cdataSectionProcessor(XML_Parser parser, const char *start, const char *end,
const char **endPtr) {
- enum XML_Error result
- = doCdataSection(parser, parser->m_encoding, &start, end, endPtr,
- (XML_Bool)! parser->m_parsingStatus.finalBuffer);
+ enum XML_Error result = doCdataSection(
+ parser, parser->m_encoding, &start, end, endPtr,
+ (XML_Bool)! parser->m_parsingStatus.finalBuffer, XML_ACCOUNT_DIRECT);
if (result != XML_ERROR_NONE)
return result;
if (start) {
*/
static enum XML_Error
doCdataSection(XML_Parser parser, const ENCODING *enc, const char **startPtr,
- const char *end, const char **nextPtr, XML_Bool haveMore) {
+ const char *end, const char **nextPtr, XML_Bool haveMore,
+ enum XML_Account account) {
const char *s = *startPtr;
const char **eventPP;
const char **eventEndPP;
*startPtr = NULL;
for (;;) {
- const char *next;
+ const char *next = s; /* in case of XML_TOK_NONE or XML_TOK_PARTIAL */
int tok = XmlCdataSectionTok(enc, s, end, &next);
+#ifdef XML_DTD
+ if (! accountingDiffTolerated(parser, tok, s, next, __LINE__, account)) {
+ accountingOnAbort(parser);
+ return XML_ERROR_AMPLIFICATION_LIMIT_BREACH;
+ }
+#else
+ UNUSED_P(account);
+#endif
*eventEndPP = next;
switch (tok) {
case XML_TOK_CDATA_SECT_CLOSE:
static enum XML_Error
doIgnoreSection(XML_Parser parser, const ENCODING *enc, const char **startPtr,
const char *end, const char **nextPtr, XML_Bool haveMore) {
- const char *next;
+ const char *next = *startPtr; /* in case of XML_TOK_NONE or XML_TOK_PARTIAL */
int tok;
const char *s = *startPtr;
const char **eventPP;
*eventPP = s;
*startPtr = NULL;
tok = XmlIgnoreSectionTok(enc, s, end, &next);
+# ifdef XML_DTD
+ if (! accountingDiffTolerated(parser, tok, s, next, __LINE__,
+ XML_ACCOUNT_DIRECT)) {
+ accountingOnAbort(parser);
+ return XML_ERROR_AMPLIFICATION_LIMIT_BREACH;
+ }
+# endif
*eventEndPP = next;
switch (tok) {
case XML_TOK_IGNORE_SECT:
const char *versionend;
const XML_Char *storedversion = NULL;
int standalone = -1;
+
+#ifdef XML_DTD
+ if (! accountingDiffTolerated(parser, XML_TOK_XML_DECL, s, next, __LINE__,
+ XML_ACCOUNT_DIRECT)) {
+ accountingOnAbort(parser);
+ return XML_ERROR_AMPLIFICATION_LIMIT_BREACH;
+ }
+#endif
+
if (! (parser->m_ns ? XmlParseXmlDeclNS : XmlParseXmlDecl)(
isGeneralTextEntity, parser->m_encoding, s, next, &parser->m_eventPtr,
&version, &versionend, &encodingName, &newEncoding, &standalone)) {
for (;;) {
tok = XmlPrologTok(parser->m_encoding, start, end, &next);
+ /* Note: Except for XML_TOK_BOM below, these bytes are accounted later in:
+ - storeEntityValue
+ - processXmlDecl
+ */
parser->m_eventEndPtr = next;
if (tok <= 0) {
if (! parser->m_parsingStatus.finalBuffer && tok != XML_TOK_INVALID) {
break;
}
/* found end of entity value - can store it now */
- return storeEntityValue(parser, parser->m_encoding, s, end);
+ return storeEntityValue(parser, parser->m_encoding, s, end,
+ XML_ACCOUNT_DIRECT);
} else if (tok == XML_TOK_XML_DECL) {
enum XML_Error result;
result = processXmlDecl(parser, 0, start, next);
*/
else if (tok == XML_TOK_BOM && next == end
&& ! parser->m_parsingStatus.finalBuffer) {
+# ifdef XML_DTD
+ if (! accountingDiffTolerated(parser, tok, s, next, __LINE__,
+ XML_ACCOUNT_DIRECT)) {
+ accountingOnAbort(parser);
+ return XML_ERROR_AMPLIFICATION_LIMIT_BREACH;
+ }
+# endif
+
*nextPtr = next;
return XML_ERROR_NONE;
}
}
/* This would cause the next stage, i.e. doProlog to be passed XML_TOK_BOM.
However, when parsing an external subset, doProlog will not accept a BOM
- as valid, and report a syntax error, so we have to skip the BOM
+ as valid, and report a syntax error, so we have to skip the BOM, and
+ account for the BOM bytes.
*/
else if (tok == XML_TOK_BOM) {
+ if (! accountingDiffTolerated(parser, tok, s, next, __LINE__,
+ XML_ACCOUNT_DIRECT)) {
+ accountingOnAbort(parser);
+ return XML_ERROR_AMPLIFICATION_LIMIT_BREACH;
+ }
+
s = next;
tok = XmlPrologTok(parser->m_encoding, s, end, &next);
}
parser->m_processor = prologProcessor;
return doProlog(parser, parser->m_encoding, s, end, tok, next, nextPtr,
- (XML_Bool)! parser->m_parsingStatus.finalBuffer, XML_TRUE);
+ (XML_Bool)! parser->m_parsingStatus.finalBuffer, XML_TRUE,
+ XML_ACCOUNT_DIRECT);
}
static enum XML_Error PTRCALL
for (;;) {
tok = XmlPrologTok(enc, start, end, &next);
+ /* Note: These bytes are accounted later in:
+ - storeEntityValue
+ */
if (tok <= 0) {
if (! parser->m_parsingStatus.finalBuffer && tok != XML_TOK_INVALID) {
*nextPtr = s;
break;
}
/* found end of entity value - can store it now */
- return storeEntityValue(parser, enc, s, end);
+ return storeEntityValue(parser, enc, s, end, XML_ACCOUNT_DIRECT);
}
start = next;
}
const char *next = s;
int tok = XmlPrologTok(parser->m_encoding, s, end, &next);
return doProlog(parser, parser->m_encoding, s, end, tok, next, nextPtr,
- (XML_Bool)! parser->m_parsingStatus.finalBuffer, XML_TRUE);
+ (XML_Bool)! parser->m_parsingStatus.finalBuffer, XML_TRUE,
+ XML_ACCOUNT_DIRECT);
}
static enum XML_Error
doProlog(XML_Parser parser, const ENCODING *enc, const char *s, const char *end,
int tok, const char *next, const char **nextPtr, XML_Bool haveMore,
- XML_Bool allowClosingDoctype) {
+ XML_Bool allowClosingDoctype, enum XML_Account account) {
#ifdef XML_DTD
static const XML_Char externalSubsetName[] = {ASCII_HASH, '\0'};
#endif /* XML_DTD */
static const XML_Char enumValueSep[] = {ASCII_PIPE, '\0'};
static const XML_Char enumValueStart[] = {ASCII_LPAREN, '\0'};
+#ifndef XML_DTD
+ UNUSED_P(account);
+#endif
+
/* save one level of indirection */
DTD *const dtd = parser->m_dtd;
}
}
role = XmlTokenRole(&parser->m_prologState, tok, s, next, enc);
+#ifdef XML_DTD
+ switch (role) {
+ case XML_ROLE_INSTANCE_START: // bytes accounted in contentProcessor
+ case XML_ROLE_XML_DECL: // bytes accounted in processXmlDecl
+ case XML_ROLE_TEXT_DECL: // bytes accounted in processXmlDecl
+ break;
+ default:
+ if (! accountingDiffTolerated(parser, tok, s, next, __LINE__, account)) {
+ accountingOnAbort(parser);
+ return XML_ERROR_AMPLIFICATION_LIMIT_BREACH;
+ }
+ }
+#endif
switch (role) {
case XML_ROLE_XML_DECL: {
enum XML_Error result = processXmlDecl(parser, 0, s, next);
const XML_Char *attVal;
enum XML_Error result = storeAttributeValue(
parser, enc, parser->m_declAttributeIsCdata,
- s + enc->minBytesPerChar, next - enc->minBytesPerChar, &dtd->pool);
+ s + enc->minBytesPerChar, next - enc->minBytesPerChar, &dtd->pool,
+ XML_ACCOUNT_NONE);
if (result)
return result;
attVal = poolStart(&dtd->pool);
break;
case XML_ROLE_ENTITY_VALUE:
if (dtd->keepProcessing) {
- enum XML_Error result = storeEntityValue(
- parser, enc, s + enc->minBytesPerChar, next - enc->minBytesPerChar);
+ enum XML_Error result
+ = storeEntityValue(parser, enc, s + enc->minBytesPerChar,
+ next - enc->minBytesPerChar, XML_ACCOUNT_NONE);
if (parser->m_declEntity) {
parser->m_declEntity->textPtr = poolStart(&dtd->entityValuePool);
parser->m_declEntity->textLen
if (parser->m_externalEntityRefHandler) {
dtd->paramEntityRead = XML_FALSE;
entity->open = XML_TRUE;
+ entityTrackingOnOpen(parser, entity, __LINE__);
if (! parser->m_externalEntityRefHandler(
parser->m_externalEntityRefHandlerArg, 0, entity->base,
entity->systemId, entity->publicId)) {
+ entityTrackingOnClose(parser, entity, __LINE__);
entity->open = XML_FALSE;
return XML_ERROR_EXTERNAL_ENTITY_HANDLING;
}
+ entityTrackingOnClose(parser, entity, __LINE__);
entity->open = XML_FALSE;
handleDefault = XML_FALSE;
if (! dtd->paramEntityRead) {
for (;;) {
const char *next = NULL;
int tok = XmlPrologTok(parser->m_encoding, s, end, &next);
+#ifdef XML_DTD
+ if (! accountingDiffTolerated(parser, tok, s, next, __LINE__,
+ XML_ACCOUNT_DIRECT)) {
+ accountingOnAbort(parser);
+ return XML_ERROR_AMPLIFICATION_LIMIT_BREACH;
+ }
+#endif
parser->m_eventEndPtr = next;
switch (tok) {
/* report partial linebreak - it might be the last token */
return XML_ERROR_NO_MEMORY;
}
entity->open = XML_TRUE;
+#ifdef XML_DTD
+ entityTrackingOnOpen(parser, entity, __LINE__);
+#endif
entity->processed = 0;
openEntity->next = parser->m_openInternalEntities;
parser->m_openInternalEntities = openEntity;
openEntity->betweenDecl = betweenDecl;
openEntity->internalEventPtr = NULL;
openEntity->internalEventEndPtr = NULL;
- textStart = (char *)entity->textPtr;
- textEnd = (char *)(entity->textPtr + entity->textLen);
+ textStart = (const char *)entity->textPtr;
+ textEnd = (const char *)(entity->textPtr + entity->textLen);
/* Set a safe default value in case 'next' does not get set */
next = textStart;
int tok
= XmlPrologTok(parser->m_internalEncoding, textStart, textEnd, &next);
result = doProlog(parser, parser->m_internalEncoding, textStart, textEnd,
- tok, next, &next, XML_FALSE, XML_FALSE);
+ tok, next, &next, XML_FALSE, XML_FALSE,
+ XML_ACCOUNT_ENTITY_EXPANSION);
} else
#endif /* XML_DTD */
result = doContent(parser, parser->m_tagLevel, parser->m_internalEncoding,
- textStart, textEnd, &next, XML_FALSE);
+ textStart, textEnd, &next, XML_FALSE,
+ XML_ACCOUNT_ENTITY_EXPANSION);
if (result == XML_ERROR_NONE) {
if (textEnd != next && parser->m_parsingStatus.parsing == XML_SUSPENDED) {
entity->processed = (int)(next - textStart);
parser->m_processor = internalEntityProcessor;
} else {
+#ifdef XML_DTD
+ entityTrackingOnClose(parser, entity, __LINE__);
+#endif /* XML_DTD */
entity->open = XML_FALSE;
parser->m_openInternalEntities = openEntity->next;
/* put openEntity back in list of free instances */
return XML_ERROR_UNEXPECTED_STATE;
entity = openEntity->entity;
- textStart = ((char *)entity->textPtr) + entity->processed;
- textEnd = (char *)(entity->textPtr + entity->textLen);
+ textStart = ((const char *)entity->textPtr) + entity->processed;
+ textEnd = (const char *)(entity->textPtr + entity->textLen);
/* Set a safe default value in case 'next' does not get set */
next = textStart;
int tok
= XmlPrologTok(parser->m_internalEncoding, textStart, textEnd, &next);
result = doProlog(parser, parser->m_internalEncoding, textStart, textEnd,
- tok, next, &next, XML_FALSE, XML_TRUE);
+ tok, next, &next, XML_FALSE, XML_TRUE,
+ XML_ACCOUNT_ENTITY_EXPANSION);
} else
#endif /* XML_DTD */
result = doContent(parser, openEntity->startTagLevel,
parser->m_internalEncoding, textStart, textEnd, &next,
- XML_FALSE);
+ XML_FALSE, XML_ACCOUNT_ENTITY_EXPANSION);
if (result != XML_ERROR_NONE)
return result;
else if (textEnd != next
&& parser->m_parsingStatus.parsing == XML_SUSPENDED) {
- entity->processed = (int)(next - (char *)entity->textPtr);
+ entity->processed = (int)(next - (const char *)entity->textPtr);
return result;
} else {
+#ifdef XML_DTD
+ entityTrackingOnClose(parser, entity, __LINE__);
+#endif
entity->open = XML_FALSE;
parser->m_openInternalEntities = openEntity->next;
/* put openEntity back in list of free instances */
parser->m_processor = prologProcessor;
tok = XmlPrologTok(parser->m_encoding, s, end, &next);
return doProlog(parser, parser->m_encoding, s, end, tok, next, nextPtr,
- (XML_Bool)! parser->m_parsingStatus.finalBuffer, XML_TRUE);
+ (XML_Bool)! parser->m_parsingStatus.finalBuffer, XML_TRUE,
+ XML_ACCOUNT_DIRECT);
} else
#endif /* XML_DTD */
{
/* see externalEntityContentProcessor vs contentProcessor */
return doContent(parser, parser->m_parentParser ? 1 : 0, parser->m_encoding,
s, end, nextPtr,
- (XML_Bool)! parser->m_parsingStatus.finalBuffer);
+ (XML_Bool)! parser->m_parsingStatus.finalBuffer,
+ XML_ACCOUNT_DIRECT);
}
}
static enum XML_Error
storeAttributeValue(XML_Parser parser, const ENCODING *enc, XML_Bool isCdata,
- const char *ptr, const char *end, STRING_POOL *pool) {
+ const char *ptr, const char *end, STRING_POOL *pool,
+ enum XML_Account account) {
enum XML_Error result
- = appendAttributeValue(parser, enc, isCdata, ptr, end, pool);
+ = appendAttributeValue(parser, enc, isCdata, ptr, end, pool, account);
if (result)
return result;
if (! isCdata && poolLength(pool) && poolLastChar(pool) == 0x20)
static enum XML_Error
appendAttributeValue(XML_Parser parser, const ENCODING *enc, XML_Bool isCdata,
- const char *ptr, const char *end, STRING_POOL *pool) {
+ const char *ptr, const char *end, STRING_POOL *pool,
+ enum XML_Account account) {
DTD *const dtd = parser->m_dtd; /* save one level of indirection */
+#ifndef XML_DTD
+ UNUSED_P(account);
+#endif
+
for (;;) {
- const char *next;
+ const char *next
+ = ptr; /* XmlAttributeValueTok doesn't always set the last arg */
int tok = XmlAttributeValueTok(enc, ptr, end, &next);
+#ifdef XML_DTD
+ if (! accountingDiffTolerated(parser, tok, ptr, next, __LINE__, account)) {
+ accountingOnAbort(parser);
+ return XML_ERROR_AMPLIFICATION_LIMIT_BREACH;
+ }
+#endif
switch (tok) {
case XML_TOK_NONE:
return XML_ERROR_NONE;
XML_Char ch = (XML_Char)XmlPredefinedEntityName(
enc, ptr + enc->minBytesPerChar, next - enc->minBytesPerChar);
if (ch) {
+#ifdef XML_DTD
+      /* NOTE: We are replacing the 4-6 character original input with a
+       *       single character, so there is no amplification; hence we
+       *       record the bytes without applying the protection limit. */
+ accountingDiffTolerated(parser, tok, (char *)&ch,
+ ((char *)&ch) + sizeof(XML_Char), __LINE__,
+ XML_ACCOUNT_ENTITY_EXPANSION);
+#endif /* XML_DTD */
if (! poolAppendChar(pool, ch))
return XML_ERROR_NO_MEMORY;
break;
enum XML_Error result;
const XML_Char *textEnd = entity->textPtr + entity->textLen;
entity->open = XML_TRUE;
+#ifdef XML_DTD
+ entityTrackingOnOpen(parser, entity, __LINE__);
+#endif
result = appendAttributeValue(parser, parser->m_internalEncoding,
- isCdata, (char *)entity->textPtr,
- (char *)textEnd, pool);
+ isCdata, (const char *)entity->textPtr,
+ (const char *)textEnd, pool,
+ XML_ACCOUNT_ENTITY_EXPANSION);
+#ifdef XML_DTD
+ entityTrackingOnClose(parser, entity, __LINE__);
+#endif
entity->open = XML_FALSE;
if (result)
return result;
static enum XML_Error
storeEntityValue(XML_Parser parser, const ENCODING *enc,
- const char *entityTextPtr, const char *entityTextEnd) {
+ const char *entityTextPtr, const char *entityTextEnd,
+ enum XML_Account account) {
DTD *const dtd = parser->m_dtd; /* save one level of indirection */
STRING_POOL *pool = &(dtd->entityValuePool);
enum XML_Error result = XML_ERROR_NONE;
#ifdef XML_DTD
int oldInEntityValue = parser->m_prologState.inEntityValue;
parser->m_prologState.inEntityValue = 1;
+#else
+ UNUSED_P(account);
#endif /* XML_DTD */
/* never return Null for the value argument in EntityDeclHandler,
since this would indicate an external entity; therefore we
}
for (;;) {
- const char *next;
+ const char *next
+ = entityTextPtr; /* XmlEntityValueTok doesn't always set the last arg */
int tok = XmlEntityValueTok(enc, entityTextPtr, entityTextEnd, &next);
+
+#ifdef XML_DTD
+ if (! accountingDiffTolerated(parser, tok, entityTextPtr, next, __LINE__,
+ account)) {
+ accountingOnAbort(parser);
+ result = XML_ERROR_AMPLIFICATION_LIMIT_BREACH;
+ goto endEntityValue;
+ }
+#endif
+
switch (tok) {
case XML_TOK_PARAM_ENTITY_REF:
#ifdef XML_DTD
if (parser->m_externalEntityRefHandler) {
dtd->paramEntityRead = XML_FALSE;
entity->open = XML_TRUE;
+ entityTrackingOnOpen(parser, entity, __LINE__);
if (! parser->m_externalEntityRefHandler(
parser->m_externalEntityRefHandlerArg, 0, entity->base,
entity->systemId, entity->publicId)) {
+ entityTrackingOnClose(parser, entity, __LINE__);
entity->open = XML_FALSE;
result = XML_ERROR_EXTERNAL_ENTITY_HANDLING;
goto endEntityValue;
}
+ entityTrackingOnClose(parser, entity, __LINE__);
entity->open = XML_FALSE;
if (! dtd->paramEntityRead)
dtd->keepProcessing = dtd->standalone;
dtd->keepProcessing = dtd->standalone;
} else {
entity->open = XML_TRUE;
+ entityTrackingOnOpen(parser, entity, __LINE__);
result = storeEntityValue(
- parser, parser->m_internalEncoding, (char *)entity->textPtr,
- (char *)(entity->textPtr + entity->textLen));
+ parser, parser->m_internalEncoding, (const char *)entity->textPtr,
+ (const char *)(entity->textPtr + entity->textLen),
+ XML_ACCOUNT_ENTITY_EXPANSION);
+ entityTrackingOnClose(parser, entity, __LINE__);
entity->open = XML_FALSE;
if (result)
goto endEntityValue;
static void FASTCALL
hashTableIterInit(HASH_TABLE_ITER *iter, const HASH_TABLE *table) {
iter->p = table->v;
- iter->end = iter->p + table->size;
+ iter->end = iter->p ? iter->p + table->size : NULL;
}
static NAMED *FASTCALL
memcpy(result, s, charsRequired * sizeof(XML_Char));
return result;
}
+
+#ifdef XML_DTD
+
+static float
+accountingGetCurrentAmplification(XML_Parser rootParser) {
+ const XmlBigCount countBytesOutput
+ = rootParser->m_accounting.countBytesDirect
+ + rootParser->m_accounting.countBytesIndirect;
+ const float amplificationFactor
+ = rootParser->m_accounting.countBytesDirect
+ ? (countBytesOutput
+ / (float)(rootParser->m_accounting.countBytesDirect))
+ : 1.0f;
+ assert(! rootParser->m_parentParser);
+ return amplificationFactor;
+}
+
+static void
+accountingReportStats(XML_Parser originParser, const char *epilog) {
+ const XML_Parser rootParser = getRootParserOf(originParser, NULL);
+ assert(! rootParser->m_parentParser);
+
+ if (rootParser->m_accounting.debugLevel < 1) {
+ return;
+ }
+
+ const float amplificationFactor
+ = accountingGetCurrentAmplification(rootParser);
+ fprintf(stderr,
+ "expat: Accounting(%p): Direct " EXPAT_FMT_ULL(
+ "10") ", indirect " EXPAT_FMT_ULL("10") ", amplification %8.2f%s",
+ (void *)rootParser, rootParser->m_accounting.countBytesDirect,
+ rootParser->m_accounting.countBytesIndirect,
+ (double)amplificationFactor, epilog);
+}
+
+static void
+accountingOnAbort(XML_Parser originParser) {
+ accountingReportStats(originParser, " ABORTING\n");
+}
+
+static void
+accountingReportDiff(XML_Parser rootParser,
+ unsigned int levelsAwayFromRootParser, const char *before,
+ const char *after, ptrdiff_t bytesMore, int source_line,
+ enum XML_Account account) {
+ assert(! rootParser->m_parentParser);
+
+ fprintf(stderr,
+ " (+" EXPAT_FMT_PTRDIFF_T("6") " bytes %s|%d, xmlparse.c:%d) %*s\"",
+ bytesMore, (account == XML_ACCOUNT_DIRECT) ? "DIR" : "EXP",
+ levelsAwayFromRootParser, source_line, 10, "");
+
+ const char ellipis[] = "[..]";
+ const size_t ellipsisLength = sizeof(ellipis) /* because compile-time */ - 1;
+ const unsigned int contextLength = 10;
+
+ /* Note: Performance is of no concern here */
+ const char *walker = before;
+ if ((rootParser->m_accounting.debugLevel >= 3)
+ || (after - before)
+ <= (ptrdiff_t)(contextLength + ellipsisLength + contextLength)) {
+ for (; walker < after; walker++) {
+ fprintf(stderr, "%s", unsignedCharToPrintable(walker[0]));
+ }
+ } else {
+ for (; walker < before + contextLength; walker++) {
+ fprintf(stderr, "%s", unsignedCharToPrintable(walker[0]));
+ }
+ fprintf(stderr, ellipis);
+ walker = after - contextLength;
+ for (; walker < after; walker++) {
+ fprintf(stderr, "%s", unsignedCharToPrintable(walker[0]));
+ }
+ }
+ fprintf(stderr, "\"\n");
+}
+
+static XML_Bool
+accountingDiffTolerated(XML_Parser originParser, int tok, const char *before,
+ const char *after, int source_line,
+ enum XML_Account account) {
+  /* Note: We need to check the token type *first* to be sure that
+   * we can even access variable <after> safely.
+   * E.g. for XML_TOK_NONE, <after> may hold an invalid pointer. */
+ switch (tok) {
+ case XML_TOK_INVALID:
+ case XML_TOK_PARTIAL:
+ case XML_TOK_PARTIAL_CHAR:
+ case XML_TOK_NONE:
+ return XML_TRUE;
+ }
+
+ if (account == XML_ACCOUNT_NONE)
+ return XML_TRUE; /* because these bytes have been accounted for, already */
+
+ unsigned int levelsAwayFromRootParser;
+ const XML_Parser rootParser
+ = getRootParserOf(originParser, &levelsAwayFromRootParser);
+ assert(! rootParser->m_parentParser);
+
+ const int isDirect
+ = (account == XML_ACCOUNT_DIRECT) && (originParser == rootParser);
+ const ptrdiff_t bytesMore = after - before;
+
+ XmlBigCount *const additionTarget
+ = isDirect ? &rootParser->m_accounting.countBytesDirect
+ : &rootParser->m_accounting.countBytesIndirect;
+
+ /* Detect and avoid integer overflow */
+ if (*additionTarget > (XmlBigCount)(-1) - (XmlBigCount)bytesMore)
+ return XML_FALSE;
+ *additionTarget += bytesMore;
+
+ const XmlBigCount countBytesOutput
+ = rootParser->m_accounting.countBytesDirect
+ + rootParser->m_accounting.countBytesIndirect;
+ const float amplificationFactor
+ = accountingGetCurrentAmplification(rootParser);
+ const XML_Bool tolerated
+ = (countBytesOutput < rootParser->m_accounting.activationThresholdBytes)
+ || (amplificationFactor
+ <= rootParser->m_accounting.maximumAmplificationFactor);
+
+ if (rootParser->m_accounting.debugLevel >= 2) {
+ accountingReportStats(rootParser, "");
+ accountingReportDiff(rootParser, levelsAwayFromRootParser, before, after,
+ bytesMore, source_line, account);
+ }
+
+ return tolerated;
+}
+
+unsigned long long
+testingAccountingGetCountBytesDirect(XML_Parser parser) {
+ if (! parser)
+ return 0;
+ return parser->m_accounting.countBytesDirect;
+}
+
+unsigned long long
+testingAccountingGetCountBytesIndirect(XML_Parser parser) {
+ if (! parser)
+ return 0;
+ return parser->m_accounting.countBytesIndirect;
+}
+
+static void
+entityTrackingReportStats(XML_Parser rootParser, ENTITY *entity,
+ const char *action, int sourceLine) {
+ assert(! rootParser->m_parentParser);
+ if (rootParser->m_entity_stats.debugLevel < 1)
+ return;
+
+# if defined(XML_UNICODE)
+ const char *const entityName = "[..]";
+# else
+ const char *const entityName = entity->name;
+# endif
+
+ fprintf(
+ stderr,
+ "expat: Entities(%p): Count %9d, depth %2d/%2d %*s%s%s; %s length %d (xmlparse.c:%d)\n",
+ (void *)rootParser, rootParser->m_entity_stats.countEverOpened,
+ rootParser->m_entity_stats.currentDepth,
+ rootParser->m_entity_stats.maximumDepthSeen,
+ (rootParser->m_entity_stats.currentDepth - 1) * 2, "",
+ entity->is_param ? "%" : "&", entityName, action, entity->textLen,
+ sourceLine);
+}
+
+static void
+entityTrackingOnOpen(XML_Parser originParser, ENTITY *entity, int sourceLine) {
+ const XML_Parser rootParser = getRootParserOf(originParser, NULL);
+ assert(! rootParser->m_parentParser);
+
+ rootParser->m_entity_stats.countEverOpened++;
+ rootParser->m_entity_stats.currentDepth++;
+ if (rootParser->m_entity_stats.currentDepth
+ > rootParser->m_entity_stats.maximumDepthSeen) {
+ rootParser->m_entity_stats.maximumDepthSeen++;
+ }
+
+ entityTrackingReportStats(rootParser, entity, "OPEN ", sourceLine);
+}
+
+static void
+entityTrackingOnClose(XML_Parser originParser, ENTITY *entity, int sourceLine) {
+ const XML_Parser rootParser = getRootParserOf(originParser, NULL);
+ assert(! rootParser->m_parentParser);
+
+ entityTrackingReportStats(rootParser, entity, "CLOSE", sourceLine);
+ rootParser->m_entity_stats.currentDepth--;
+}
+
+static XML_Parser
+getRootParserOf(XML_Parser parser, unsigned int *outLevelDiff) {
+ XML_Parser rootParser = parser;
+ unsigned int stepsTakenUpwards = 0;
+ while (rootParser->m_parentParser) {
+ rootParser = rootParser->m_parentParser;
+ stepsTakenUpwards++;
+ }
+ assert(! rootParser->m_parentParser);
+ if (outLevelDiff != NULL) {
+ *outLevelDiff = stepsTakenUpwards;
+ }
+ return rootParser;
+}
+
+const char *
+unsignedCharToPrintable(unsigned char c) {
+ switch (c) {
+ case 0:
+ return "\\0";
+ case 1:
+ return "\\x1";
+ case 2:
+ return "\\x2";
+ case 3:
+ return "\\x3";
+ case 4:
+ return "\\x4";
+ case 5:
+ return "\\x5";
+ case 6:
+ return "\\x6";
+ case 7:
+ return "\\x7";
+ case 8:
+ return "\\x8";
+ case 9:
+ return "\\t";
+ case 10:
+ return "\\n";
+ case 11:
+ return "\\xB";
+ case 12:
+ return "\\xC";
+ case 13:
+ return "\\r";
+ case 14:
+ return "\\xE";
+ case 15:
+ return "\\xF";
+ case 16:
+ return "\\x10";
+ case 17:
+ return "\\x11";
+ case 18:
+ return "\\x12";
+ case 19:
+ return "\\x13";
+ case 20:
+ return "\\x14";
+ case 21:
+ return "\\x15";
+ case 22:
+ return "\\x16";
+ case 23:
+ return "\\x17";
+ case 24:
+ return "\\x18";
+ case 25:
+ return "\\x19";
+ case 26:
+ return "\\x1A";
+ case 27:
+ return "\\x1B";
+ case 28:
+ return "\\x1C";
+ case 29:
+ return "\\x1D";
+ case 30:
+ return "\\x1E";
+ case 31:
+ return "\\x1F";
+ case 32:
+ return " ";
+ case 33:
+ return "!";
+ case 34:
+ return "\\\"";
+ case 35:
+ return "#";
+ case 36:
+ return "$";
+ case 37:
+ return "%";
+ case 38:
+ return "&";
+ case 39:
+ return "'";
+ case 40:
+ return "(";
+ case 41:
+ return ")";
+ case 42:
+ return "*";
+ case 43:
+ return "+";
+ case 44:
+ return ",";
+ case 45:
+ return "-";
+ case 46:
+ return ".";
+ case 47:
+ return "/";
+ case 48:
+ return "0";
+ case 49:
+ return "1";
+ case 50:
+ return "2";
+ case 51:
+ return "3";
+ case 52:
+ return "4";
+ case 53:
+ return "5";
+ case 54:
+ return "6";
+ case 55:
+ return "7";
+ case 56:
+ return "8";
+ case 57:
+ return "9";
+ case 58:
+ return ":";
+ case 59:
+ return ";";
+ case 60:
+ return "<";
+ case 61:
+ return "=";
+ case 62:
+ return ">";
+ case 63:
+ return "?";
+ case 64:
+ return "@";
+ case 65:
+ return "A";
+ case 66:
+ return "B";
+ case 67:
+ return "C";
+ case 68:
+ return "D";
+ case 69:
+ return "E";
+ case 70:
+ return "F";
+ case 71:
+ return "G";
+ case 72:
+ return "H";
+ case 73:
+ return "I";
+ case 74:
+ return "J";
+ case 75:
+ return "K";
+ case 76:
+ return "L";
+ case 77:
+ return "M";
+ case 78:
+ return "N";
+ case 79:
+ return "O";
+ case 80:
+ return "P";
+ case 81:
+ return "Q";
+ case 82:
+ return "R";
+ case 83:
+ return "S";
+ case 84:
+ return "T";
+ case 85:
+ return "U";
+ case 86:
+ return "V";
+ case 87:
+ return "W";
+ case 88:
+ return "X";
+ case 89:
+ return "Y";
+ case 90:
+ return "Z";
+ case 91:
+ return "[";
+ case 92:
+ return "\\\\";
+ case 93:
+ return "]";
+ case 94:
+ return "^";
+ case 95:
+ return "_";
+ case 96:
+ return "`";
+ case 97:
+ return "a";
+ case 98:
+ return "b";
+ case 99:
+ return "c";
+ case 100:
+ return "d";
+ case 101:
+ return "e";
+ case 102:
+ return "f";
+ case 103:
+ return "g";
+ case 104:
+ return "h";
+ case 105:
+ return "i";
+ case 106:
+ return "j";
+ case 107:
+ return "k";
+ case 108:
+ return "l";
+ case 109:
+ return "m";
+ case 110:
+ return "n";
+ case 111:
+ return "o";
+ case 112:
+ return "p";
+ case 113:
+ return "q";
+ case 114:
+ return "r";
+ case 115:
+ return "s";
+ case 116:
+ return "t";
+ case 117:
+ return "u";
+ case 118:
+ return "v";
+ case 119:
+ return "w";
+ case 120:
+ return "x";
+ case 121:
+ return "y";
+ case 122:
+ return "z";
+ case 123:
+ return "{";
+ case 124:
+ return "|";
+ case 125:
+ return "}";
+ case 126:
+ return "~";
+ case 127:
+ return "\\x7F";
+ case 128:
+ return "\\x80";
+ case 129:
+ return "\\x81";
+ case 130:
+ return "\\x82";
+ case 131:
+ return "\\x83";
+ case 132:
+ return "\\x84";
+ case 133:
+ return "\\x85";
+ case 134:
+ return "\\x86";
+ case 135:
+ return "\\x87";
+ case 136:
+ return "\\x88";
+ case 137:
+ return "\\x89";
+ case 138:
+ return "\\x8A";
+ case 139:
+ return "\\x8B";
+ case 140:
+ return "\\x8C";
+ case 141:
+ return "\\x8D";
+ case 142:
+ return "\\x8E";
+ case 143:
+ return "\\x8F";
+ case 144:
+ return "\\x90";
+ case 145:
+ return "\\x91";
+ case 146:
+ return "\\x92";
+ case 147:
+ return "\\x93";
+ case 148:
+ return "\\x94";
+ case 149:
+ return "\\x95";
+ case 150:
+ return "\\x96";
+ case 151:
+ return "\\x97";
+ case 152:
+ return "\\x98";
+ case 153:
+ return "\\x99";
+ case 154:
+ return "\\x9A";
+ case 155:
+ return "\\x9B";
+ case 156:
+ return "\\x9C";
+ case 157:
+ return "\\x9D";
+ case 158:
+ return "\\x9E";
+ case 159:
+ return "\\x9F";
+ case 160:
+ return "\\xA0";
+ case 161:
+ return "\\xA1";
+ case 162:
+ return "\\xA2";
+ case 163:
+ return "\\xA3";
+ case 164:
+ return "\\xA4";
+ case 165:
+ return "\\xA5";
+ case 166:
+ return "\\xA6";
+ case 167:
+ return "\\xA7";
+ case 168:
+ return "\\xA8";
+ case 169:
+ return "\\xA9";
+ case 170:
+ return "\\xAA";
+ case 171:
+ return "\\xAB";
+ case 172:
+ return "\\xAC";
+ case 173:
+ return "\\xAD";
+ case 174:
+ return "\\xAE";
+ case 175:
+ return "\\xAF";
+ case 176:
+ return "\\xB0";
+ case 177:
+ return "\\xB1";
+ case 178:
+ return "\\xB2";
+ case 179:
+ return "\\xB3";
+ case 180:
+ return "\\xB4";
+ case 181:
+ return "\\xB5";
+ case 182:
+ return "\\xB6";
+ case 183:
+ return "\\xB7";
+ case 184:
+ return "\\xB8";
+ case 185:
+ return "\\xB9";
+ case 186:
+ return "\\xBA";
+ case 187:
+ return "\\xBB";
+ case 188:
+ return "\\xBC";
+ case 189:
+ return "\\xBD";
+ case 190:
+ return "\\xBE";
+ case 191:
+ return "\\xBF";
+ case 192:
+ return "\\xC0";
+ case 193:
+ return "\\xC1";
+ case 194:
+ return "\\xC2";
+ case 195:
+ return "\\xC3";
+ case 196:
+ return "\\xC4";
+ case 197:
+ return "\\xC5";
+ case 198:
+ return "\\xC6";
+ case 199:
+ return "\\xC7";
+ case 200:
+ return "\\xC8";
+ case 201:
+ return "\\xC9";
+ case 202:
+ return "\\xCA";
+ case 203:
+ return "\\xCB";
+ case 204:
+ return "\\xCC";
+ case 205:
+ return "\\xCD";
+ case 206:
+ return "\\xCE";
+ case 207:
+ return "\\xCF";
+ case 208:
+ return "\\xD0";
+ case 209:
+ return "\\xD1";
+ case 210:
+ return "\\xD2";
+ case 211:
+ return "\\xD3";
+ case 212:
+ return "\\xD4";
+ case 213:
+ return "\\xD5";
+ case 214:
+ return "\\xD6";
+ case 215:
+ return "\\xD7";
+ case 216:
+ return "\\xD8";
+ case 217:
+ return "\\xD9";
+ case 218:
+ return "\\xDA";
+ case 219:
+ return "\\xDB";
+ case 220:
+ return "\\xDC";
+ case 221:
+ return "\\xDD";
+ case 222:
+ return "\\xDE";
+ case 223:
+ return "\\xDF";
+ case 224:
+ return "\\xE0";
+ case 225:
+ return "\\xE1";
+ case 226:
+ return "\\xE2";
+ case 227:
+ return "\\xE3";
+ case 228:
+ return "\\xE4";
+ case 229:
+ return "\\xE5";
+ case 230:
+ return "\\xE6";
+ case 231:
+ return "\\xE7";
+ case 232:
+ return "\\xE8";
+ case 233:
+ return "\\xE9";
+ case 234:
+ return "\\xEA";
+ case 235:
+ return "\\xEB";
+ case 236:
+ return "\\xEC";
+ case 237:
+ return "\\xED";
+ case 238:
+ return "\\xEE";
+ case 239:
+ return "\\xEF";
+ case 240:
+ return "\\xF0";
+ case 241:
+ return "\\xF1";
+ case 242:
+ return "\\xF2";
+ case 243:
+ return "\\xF3";
+ case 244:
+ return "\\xF4";
+ case 245:
+ return "\\xF5";
+ case 246:
+ return "\\xF6";
+ case 247:
+ return "\\xF7";
+ case 248:
+ return "\\xF8";
+ case 249:
+ return "\\xF9";
+ case 250:
+ return "\\xFA";
+ case 251:
+ return "\\xFB";
+ case 252:
+ return "\\xFC";
+ case 253:
+ return "\\xFD";
+ case 254:
+ return "\\xFE";
+ case 255:
+ return "\\xFF";
+ default:
+ assert(0); /* never gets here */
+ return "dead code";
+ }
+ assert(0); /* never gets here */
+}
+
+#endif /* XML_DTD */
+
+static unsigned long
+getDebugLevel(const char *variableName, unsigned long defaultDebugLevel) {
+ const char *const valueOrNull = getenv(variableName);
+ if (valueOrNull == NULL) {
+ return defaultDebugLevel;
+ }
+ const char *const value = valueOrNull;
+
+ errno = 0;
+ char *afterValue = (char *)value;
+ unsigned long debugLevel = strtoul(value, &afterValue, 10);
+ if ((errno != 0) || (afterValue[0] != '\0')) {
+ errno = 0;
+ return defaultDebugLevel;
+ }
+
+ return debugLevel;
+}
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2002 Greg Stein <gstein@users.sourceforge.net>
+ Copyright (c) 2002-2006 Karl Waclawek <karl@waclawek.net>
+ Copyright (c) 2002-2003 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2005-2009 Steven Solie <ssolie@users.sourceforge.net>
+ Copyright (c) 2016-2021 Sebastian Pipping <sebastian@pipping.org>
+ Copyright (c) 2017 Rhodri James <rhodri@wildebeest.org.uk>
+ Copyright (c) 2019 David Loffredo <loffredo@steptools.com>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
#ifdef _WIN32
# include "winconfig.h"
-#else
-# ifdef HAVE_EXPAT_CONFIG_H
-# include <expat_config.h>
-# endif
-#endif /* ndef _WIN32 */
+#endif
+
+#include <expat_config.h>
#include "expat_external.h"
#include "internal.h"
#ifdef XML_DTD
if (! state->documentEntity && tok == XML_TOK_PARAM_ENTITY_REF)
return XML_ROLE_INNER_PARAM_ENTITY_REF;
+#else
+ UNUSED_P(tok);
#endif
state->handler = error;
return XML_ROLE_ERROR;
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2002 Karl Waclawek <karl@waclawek.net>
+ Copyright (c) 2002 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2017 Sebastian Pipping <sebastian@pipping.org>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2001-2003 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2002 Greg Stein <gstein@users.sourceforge.net>
+ Copyright (c) 2002-2016 Karl Waclawek <karl@waclawek.net>
+ Copyright (c) 2005-2009 Steven Solie <ssolie@users.sourceforge.net>
+ Copyright (c) 2016-2021 Sebastian Pipping <sebastian@pipping.org>
+ Copyright (c) 2016 Pascal Cuoq <cuoq@trust-in-soft.com>
+ Copyright (c) 2016 Don Lewis <truckman@apache.org>
+ Copyright (c) 2017 Rhodri James <rhodri@wildebeest.org.uk>
+ Copyright (c) 2017 Alexander Bluhm <alexander.bluhm@gmx.net>
+ Copyright (c) 2017 Benbuck Nason <bnason@netflix.com>
+ Copyright (c) 2017 José Gutiérrez de la Concha <jose@zeroc.com>
+ Copyright (c) 2019 David Loffredo <loffredo@steptools.com>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
-#ifdef _WIN32
-# include "winconfig.h"
-#else
-# ifdef HAVE_EXPAT_CONFIG_H
-# include <expat_config.h>
-# endif
-#endif /* ndef _WIN32 */
-
#include <stddef.h>
#include <string.h> /* memcpy */
+#include <stdbool.h>
-#if defined(_MSC_VER) && (_MSC_VER <= 1700)
-/* for vs2012/11.0/1700 and earlier Visual Studio compilers */
-# define bool int
-# define false 0
-# define true 1
-#else
-# include <stdbool.h>
+#ifdef _WIN32
+# include "winconfig.h"
#endif
+#include <expat_config.h>
+
#include "expat_external.h"
#include "internal.h"
#include "xmltok.h"
#define IS_NAME_CHAR(enc, p, n) (AS_NORMAL_ENCODING(enc)->isName##n(enc, p))
#define IS_NMSTRT_CHAR(enc, p, n) (AS_NORMAL_ENCODING(enc)->isNmstrt##n(enc, p))
-#define IS_INVALID_CHAR(enc, p, n) \
- (AS_NORMAL_ENCODING(enc)->isInvalid##n(enc, p))
+#ifdef XML_MIN_SIZE
+# define IS_INVALID_CHAR(enc, p, n) \
+ (AS_NORMAL_ENCODING(enc)->isInvalid##n \
+ && AS_NORMAL_ENCODING(enc)->isInvalid##n(enc, p))
+#else
+# define IS_INVALID_CHAR(enc, p, n) \
+ (AS_NORMAL_ENCODING(enc)->isInvalid##n(enc, p))
+#endif
#ifdef XML_MIN_SIZE
# define IS_NAME_CHAR_MINBPC(enc, p) \
static int PTRFASTCALL
unicode_byte_type(char hi, char lo) {
switch ((unsigned char)hi) {
- /* 0xD800–0xDBFF first 16-bit code unit or high surrogate (W1) */
+ /* 0xD800-0xDBFF first 16-bit code unit or high surrogate (W1) */
case 0xD8:
case 0xD9:
case 0xDA:
case 0xDB:
return BT_LEAD4;
- /* 0xDC00–0xDFFF second 16-bit code unit or low surrogate (W2) */
+ /* 0xDC00-0xDFFF second 16-bit code unit or low surrogate (W2) */
case 0xDC:
case 0xDD:
case 0xDE:
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2002 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2002-2005 Karl Waclawek <karl@waclawek.net>
+ Copyright (c) 2016-2017 Sebastian Pipping <sebastian@pipping.org>
+ Copyright (c) 2017 Rhodri James <rhodri@wildebeest.org.uk>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
-/* This file is included!
+/* This file is included (from xmltok.c, 1-3 times depending on XML_MIN_SIZE)!
__ __ _
___\ \/ /_ __ __ _| |_
/ _ \\ /| '_ \ / _` | __|
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2002 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2002-2016 Karl Waclawek <karl@waclawek.net>
+ Copyright (c) 2016-2021 Sebastian Pipping <sebastian@pipping.org>
+ Copyright (c) 2017 Rhodri James <rhodri@wildebeest.org.uk>
+ Copyright (c) 2018 Benjamin Peterson <benjamin@python.org>
+ Copyright (c) 2018 Anton Maklakov <antmak.pub@gmail.com>
+ Copyright (c) 2019 David Loffredo <loffredo@steptools.com>
+ Copyright (c) 2020 Boris Kolpackov <boris@codesynthesis.com>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
#ifdef XML_TOK_IMPL_C
-# ifndef IS_INVALID_CHAR
+# ifndef IS_INVALID_CHAR // i.e. for UTF-16 and XML_MIN_SIZE not defined
# define IS_INVALID_CHAR(enc, ptr, n) (0)
# endif
# define LEAD_CASE(n) \
case BT_LEAD##n: \
ptr += n; \
+ pos->columnNumber++; \
break;
LEAD_CASE(2)
LEAD_CASE(3)
LEAD_CASE(4)
# undef LEAD_CASE
case BT_LF:
- pos->columnNumber = (XML_Size)-1;
+ pos->columnNumber = 0;
pos->lineNumber++;
ptr += MINBPC(enc);
break;
ptr += MINBPC(enc);
if (HAS_CHAR(enc, ptr, end) && BYTE_TYPE(enc, ptr) == BT_LF)
ptr += MINBPC(enc);
- pos->columnNumber = (XML_Size)-1;
+ pos->columnNumber = 0;
break;
default:
ptr += MINBPC(enc);
+ pos->columnNumber++;
break;
}
- pos->columnNumber++;
}
}
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2017-2019 Sebastian Pipping <sebastian@pipping.org>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
|_| XML parser
Copyright (c) 1997-2000 Thai Open Source Software Center Ltd
- Copyright (c) 2000-2017 Expat development team
+ Copyright (c) 2000 Clark Cooper <coopercc@users.sourceforge.net>
+ Copyright (c) 2002 Greg Stein <gstein@users.sourceforge.net>
+ Copyright (c) 2002 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
+ Copyright (c) 2002-2006 Karl Waclawek <karl@waclawek.net>
+ Copyright (c) 2017 Sebastian Pipping <sebastian@pipping.org>
Licensed under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining
static const ENCODING *
NS(findEncoding)(const ENCODING *enc, const char *ptr, const char *end) {
# define ENCODING_MAX 128
- char buf[ENCODING_MAX] = {0};
+ char buf[ENCODING_MAX];
char *p = buf;
int i;
XmlUtf8Convert(enc, &ptr, end, &p, p + ENCODING_MAX - 1);
too, and they can do ordinary things with weakrefs that end up resurrecting
CT while gc is running.
- http://www.python.org/sf/1055820
+ https://www.python.org/sf/1055820
shows how innocent it can be, and also how nasty. Variants of the three
focussed test cases attached to that bug report are now part of Python's
tee_fromiterable(PyObject *iterable)
{
teeobject *to;
- PyObject *it = NULL;
+ PyObject *it;
it = PyObject_GetIter(iterable);
if (it == NULL)
goto done;
}
- to = PyObject_GC_New(teeobject, &tee_type);
- if (to == NULL)
- goto done;
- to->dataobj = (teedataobject *)teedataobject_newinternal(it);
- if (!to->dataobj) {
- PyObject_GC_Del(to);
+ PyObject *dataobj = teedataobject_newinternal(it);
+ if (!dataobj) {
to = NULL;
goto done;
}
-
+ to = PyObject_GC_New(teeobject, &tee_type);
+ if (to == NULL) {
+ Py_DECREF(dataobj);
+ goto done;
+ }
+ to->dataobj = (teedataobject *)dataobj;
to->index = 0;
to->weakreflist = NULL;
PyObject_GC_Track(to);
done:
- Py_XDECREF(it);
+ Py_DECREF(it);
return (PyObject *)to;
}
static PyObject *
instancemethod_call(PyObject *self, PyObject *arg, PyObject *kw)
{
- return PyObject_Call(PyMethod_GET_FUNCTION(self), arg, kw);
+ return PyObject_Call(PyInstanceMethod_GET_FUNCTION(self), arg, kw);
}
static PyObject *
static Py_complex
c_powi(Py_complex x, long n)
{
- Py_complex cn;
-
- if (n > 100 || n < -100) {
- cn.real = (double) n;
- cn.imag = 0.;
- return _Py_c_pow(x,cn);
- }
- else if (n > 0)
+ if (n > 0)
return c_powu(x,n);
else
return _Py_c_quot(c_1, c_powu(x,-n));
complex_pow(PyObject *v, PyObject *w, PyObject *z)
{
Py_complex p;
- Py_complex exponent;
- long int_exponent;
Py_complex a, b;
TO_COMPLEX(v, a);
TO_COMPLEX(w, b);
return NULL;
}
errno = 0;
- exponent = b;
- int_exponent = (long)exponent.real;
- if (exponent.imag == 0. && exponent.real == int_exponent)
- p = c_powi(a, int_exponent);
- else
- p = _Py_c_pow(a, exponent);
+ // Check whether the exponent has a small integer value, and if so use
+ // a faster and more accurate algorithm.
+ if (b.imag == 0.0 && b.real == floor(b.real) && fabs(b.real) <= 100.0) {
+ p = c_powi(a, (long)b.real);
+ }
+ else {
+ p = _Py_c_pow(a, b);
+ }
Py_ADJUST_ERANGE2(p.real, p.imag);
if (errno == EDOM) {
bits lsb, lsb-2, lsb-3, lsb-4, ... is 1. */
if ((digit & half_eps) != 0) {
round_up = 0;
- if ((digit & (3*half_eps-1)) != 0 ||
- (half_eps == 8 && (HEX_DIGIT(key_digit+1) & 1) != 0))
+ if ((digit & (3*half_eps-1)) != 0 || (half_eps == 8 &&
+ key_digit+1 < ndigits && (HEX_DIGIT(key_digit+1) & 1) != 0))
round_up = 1;
else
for (i = key_digit-1; i >= 0; i--)
if (_PyObject_GC_IS_TRACKED(f))
_PyObject_GC_UNTRACK(f);
- Py_TRASHCAN_SAFE_BEGIN(f)
+ Py_TRASHCAN_BEGIN(f, frame_dealloc);
/* Kill all local variables */
valuestack = f->f_valuestack;
for (p = f->f_localsplus; p < valuestack; p++)
}
Py_DECREF(co);
- Py_TRASHCAN_SAFE_END(f)
+ Py_TRASHCAN_END;
}
static inline Py_ssize_t
return NULL;
}
if (!setup_ga(self, origin, arguments)) {
- type->tp_free((PyObject *)self);
+ Py_DECREF(self);
return NULL;
}
return (PyObject *)self;
PyObject *
Py_GenericAlias(PyObject *origin, PyObject *args)
{
- gaobject *alias = PyObject_GC_New(gaobject, &Py_GenericAliasType);
+ gaobject *alias = (gaobject*) PyType_GenericAlloc(
+ (PyTypeObject *)&Py_GenericAliasType, 0);
if (alias == NULL) {
return NULL;
}
if (!setup_ga(alias, origin, args)) {
- PyObject_GC_Del((PyObject *)alias);
+ Py_DECREF(alias);
return NULL;
}
- _PyObject_GC_TRACK(alias);
return (PyObject *)alias;
}
static PyObject *
list_new_prealloc(Py_ssize_t size)
{
+ assert(size > 0);
PyListObject *op = (PyListObject *) PyList_New(0);
- if (size == 0 || op == NULL) {
- return (PyObject *) op;
+ if (op == NULL) {
+ return NULL;
}
assert(op->ob_item == NULL);
op->ob_item = PyMem_New(PyObject *, size);
PyObject **src, **dest;
Py_ssize_t i, len;
len = ihigh - ilow;
+ if (len <= 0) {
+ return PyList_New(0);
+ }
np = (PyListObject *) list_new_prealloc(len);
if (np == NULL)
return NULL;
if (Py_SIZE(a) > PY_SSIZE_T_MAX - Py_SIZE(b))
return PyErr_NoMemory();
size = Py_SIZE(a) + Py_SIZE(b);
+ if (size == 0) {
+ return PyList_New(0);
+ }
np = (PyListObject *) list_new_prealloc(size);
if (np == NULL) {
return NULL;
static void
meth_dealloc(PyCFunctionObject *m)
{
- _PyObject_GC_UNTRACK(m);
+ // The Py_TRASHCAN mechanism requires that we be able to
+ // call PyObject_GC_UnTrack twice on an object.
+ PyObject_GC_UnTrack(m);
+ Py_TRASHCAN_BEGIN(m, meth_dealloc);
if (m->m_weakreflist != NULL) {
PyObject_ClearWeakRefs((PyObject*) m);
}
Py_XDECREF(m->m_self);
Py_XDECREF(m->m_module);
PyObject_GC_Del(m);
+ Py_TRASHCAN_END;
}
static PyObject *
if (range == NULL)
goto err;
/* return the result */
- return Py_BuildValue("N(N)i", _PyEval_GetBuiltinId(&PyId_iter),
+ return Py_BuildValue("N(N)l", _PyEval_GetBuiltinId(&PyId_iter),
range, r->index);
err:
Py_XDECREF(start);
/* Extract the type again; tp_del may have changed it */
type = Py_TYPE(self);
+ // Don't read type memory after calling basedealloc() since basedealloc()
+ // can deallocate the type and free its memory.
+ int type_needs_decref = (type->tp_flags & Py_TPFLAGS_HEAPTYPE
+ && !(base->tp_flags & Py_TPFLAGS_HEAPTYPE));
+
/* Call the base tp_dealloc() */
assert(basedealloc);
basedealloc(self);
- /* Only decref if the base type is not already a heap allocated type.
- Otherwise, basedealloc should have decref'd it already */
- if (type->tp_flags & Py_TPFLAGS_HEAPTYPE && !(base->tp_flags & Py_TPFLAGS_HEAPTYPE))
+ /* Can't reference self beyond this point. It's possible tp_del switched
+ our type from a HEAPTYPE to a non-HEAPTYPE, so be careful about
+ reference counting. Only decref if the base type is not already a heap
+ allocated type. Otherwise, basedealloc should have decref'd it already */
+ if (type_needs_decref) {
Py_DECREF(type);
+ }
/* Done */
return;
if (_PyType_IS_GC(base)) {
_PyObject_GC_TRACK(self);
}
+
+ // Don't read type memory after calling basedealloc() since basedealloc()
+ // can deallocate the type and free its memory.
+ int type_needs_decref = (type->tp_flags & Py_TPFLAGS_HEAPTYPE
+ && !(base->tp_flags & Py_TPFLAGS_HEAPTYPE));
+
assert(basedealloc);
basedealloc(self);
our type from a HEAPTYPE to a non-HEAPTYPE, so be careful about
reference counting. Only decref if the base type is not already a heap
allocated type. Otherwise, basedealloc should have decref'd it already */
- if (type->tp_flags & Py_TPFLAGS_HEAPTYPE && !(base->tp_flags & Py_TPFLAGS_HEAPTYPE))
- Py_DECREF(type);
+ if (type_needs_decref) {
+ Py_DECREF(type);
+ }
endlabel:
Py_TRASHCAN_END
return NULL;
PyObject *obj = PyWeakref_GET_OBJECT(proxy);
+ if (!PyIter_Check(obj)) {
+ PyErr_Format(PyExc_TypeError,
+ "Weakref proxy referenced a non-iterator '%.200s' object",
+ Py_TYPE(obj)->tp_name);
+ return NULL;
+ }
Py_INCREF(obj);
PyObject* res = PyIter_Next(obj);
Py_DECREF(obj);
};
-static Py_hash_t
-proxy_hash(PyObject *self)
-{
- PyWeakReference *proxy = (PyWeakReference *)self;
- if (!proxy_checkref(proxy)) {
- return -1;
- }
- PyObject *obj = PyWeakref_GET_OBJECT(proxy);
- Py_INCREF(obj);
- Py_hash_t res = PyObject_Hash(obj);
- Py_DECREF(obj);
- return res;
-}
-
-
PyTypeObject
_PyWeakref_ProxyType = {
PyVarObject_HEAD_INIT(&PyType_Type, 0)
&proxy_as_number, /* tp_as_number */
&proxy_as_sequence, /* tp_as_sequence */
&proxy_as_mapping, /* tp_as_mapping */
- proxy_hash, /* tp_hash */
+// Notice that tp_hash is intentionally omitted as proxies are "mutable" (when the reference dies).
+ 0, /* tp_hash */
0, /* tp_call */
proxy_str, /* tp_str */
proxy_getattr, /* tp_getattro */
\r
set libraries=\r
set libraries=%libraries% bzip2-1.0.6\r
-if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi\r
-if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1k\r
+if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi-3.3.0\r
+if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1l\r
set libraries=%libraries% sqlite-3.35.5.0\r
if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.9.0\r
if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.9.0\r
echo.Fetching external binaries...\r
\r
set binaries=\r
-if NOT "%IncludeLibffi%"=="false" set binaries=%binaries% libffi\r
-if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1k-1\r
+if NOT "%IncludeLibffi%"=="false" set binaries=%binaries% libffi-3.3.0\r
+if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1l\r
if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.9.0\r
if NOT "%IncludeSSLSrc%"=="false" set binaries=%binaries% nasm-2.11.06\r
\r
set BUILD_ARM64=\r
set BUILD_PDB=\r
set BUILD_NOOPT=\r
+set COPY_LICENSE=\r
set INSTALL_CYGWIN=\r
\r
:CheckOpts\r
if /I "%1"=="-arm64" (set BUILD_ARM64=1) & shift & goto :CheckOpts\r
if /I "%1"=="-pdb" (set BUILD_PDB=-g) & shift & goto :CheckOpts\r
if /I "%1"=="-noopt" (set BUILD_NOOPT=CFLAGS='-Od -warn all') & shift & goto :CheckOpts\r
+if /I "%1"=="-license" (set COPY_LICENSE=1) & shift & goto :CheckOpts\r
if /I "%1"=="-?" goto :Usage\r
if /I "%1"=="--install-cygwin" (set INSTALL_CYGWIN=1) & shift & goto :CheckOpts\r
goto :Usage\r
set BUILD_X86=1\r
set BUILD_ARM32=1\r
set BUILD_ARM64=1\r
+ set COPY_LICENSE=1\r
)\r
\r
if "%INSTALL_CYGWIN%"=="1" call :InstallCygwin\r
)\r
\r
if "%BUILD_X64%"=="1" call :BuildOne x64 x86_64-w64-cygwin x86_64-w64-cygwin\r
+if errorlevel 1 exit /B %ERRORLEVEL%\r
if "%BUILD_X86%"=="1" call :BuildOne x86 i686-pc-cygwin i686-pc-cygwin\r
+if errorlevel 1 exit /B %ERRORLEVEL%\r
if "%BUILD_ARM32%"=="1" call :BuildOne x86_arm i686-pc-cygwin arm-w32-cygwin\r
+if errorlevel 1 exit /B %ERRORLEVEL%\r
if "%BUILD_ARM64%"=="1" call :BuildOne x86_arm64 i686-pc-cygwin aarch64-w64-cygwin\r
+if errorlevel 1 exit /B %ERRORLEVEL%\r
+if "%COPY_LICENSE%"=="1" copy /y "%LIBFFI_SOURCE%\LICENSE" "%LIBFFI_OUT%\LICENSE"\r
\r
popd\r
endlocal\r
\r
echo copying files to %_LIBFFI_OUT%\r
if not exist %_LIBFFI_OUT%\include (md %_LIBFFI_OUT%\include)\r
-copy %ARTIFACTS%\.libs\libffi-7.dll %_LIBFFI_OUT%\r
-copy %ARTIFACTS%\.libs\libffi-7.lib %_LIBFFI_OUT%\r
-copy %ARTIFACTS%\.libs\libffi-7.pdb %_LIBFFI_OUT%\r
-copy %ARTIFACTS%\fficonfig.h %_LIBFFI_OUT%\include\r
-copy %ARTIFACTS%\include\*.h %_LIBFFI_OUT%\include\r
+copy %ARTIFACTS%\.libs\libffi-*.dll %_LIBFFI_OUT% || exit /B 1\r
+copy %ARTIFACTS%\.libs\libffi-*.lib %_LIBFFI_OUT% || exit /B 1\r
+copy %ARTIFACTS%\.libs\libffi-*.pdb %_LIBFFI_OUT%\r
+copy %ARTIFACTS%\fficonfig.h %_LIBFFI_OUT%\include || exit /B 1\r
+copy %ARTIFACTS%\include\*.h %_LIBFFI_OUT%\include || exit /B 1\r
\r
endlocal\r
exit /b\r
\r
We set BasePlatformToolset for ICC's benefit, it's otherwise ignored.\r
-->\r
+ <BasePlatformToolset Condition="'$(BasePlatformToolset)' == '' and ('$(MSBuildToolsVersion)' == '17.0' or '$(VisualStudioVersion)' == '17.0')">v142</BasePlatformToolset>\r
<BasePlatformToolset Condition="'$(BasePlatformToolset)' == '' and ('$(MSBuildToolsVersion)' == '16.0' or '$(VisualStudioVersion)' == '16.0')">v142</BasePlatformToolset>\r
<BasePlatformToolset Condition="'$(BasePlatformToolset)' == '' and ('$(MSBuildToolsVersion)' == '15.0' or '$(VisualStudioVersion)' == '15.0')">v141</BasePlatformToolset>\r
<BasePlatformToolset Condition="'$(BasePlatformToolset)' == '' and '$(VCTargetsPath14)' != ''">v140</BasePlatformToolset>\r
<sqlite3Dir>$(ExternalsDir)sqlite-3.35.5.0\</sqlite3Dir>\r
<bz2Dir>$(ExternalsDir)bzip2-1.0.6\</bz2Dir>\r
<lzmaDir>$(ExternalsDir)xz-5.2.2\</lzmaDir>\r
- <libffiDir>$(ExternalsDir)libffi\</libffiDir>\r
- <libffiOutDir>$(ExternalsDir)libffi\$(ArchName)\</libffiOutDir>\r
+ <libffiDir>$(ExternalsDir)libffi-3.3.0\</libffiDir>\r
+ <libffiOutDir>$(ExternalsDir)libffi-3.3.0\$(ArchName)\</libffiOutDir>\r
<libffiIncludeDir>$(libffiOutDir)include</libffiIncludeDir>\r
- <opensslDir>$(ExternalsDir)openssl-1.1.1k\</opensslDir>\r
- <opensslOutDir>$(ExternalsDir)openssl-bin-1.1.1k-1\$(ArchName)\</opensslOutDir>\r
+ <opensslDir>$(ExternalsDir)openssl-1.1.1l\</opensslDir>\r
+ <opensslOutDir>$(ExternalsDir)openssl-bin-1.1.1l\$(ArchName)\</opensslOutDir>\r
<opensslIncludeDir>$(opensslOutDir)include</opensslIncludeDir>\r
<nasmDir>$(ExternalsDir)\nasm-2.11.06\</nasmDir>\r
<zlibDir>$(ExternalsDir)\zlib-1.2.11\</zlibDir>\r
\r
<!-- The minimum allowed SDK version to use for building -->\r
<DefaultWindowsSDKVersion>10.0.10586.0</DefaultWindowsSDKVersion>\r
- <DefaultWindowsSDKVersion Condition="$([System.Version]::Parse($(_RegistryVersion))) > $([System.Version]::Parse($(DefaultWindowsSDKVersion)))">$(_RegistryVersion)</DefaultWindowsSDKVersion>\r
+ <DefaultWindowsSDKVersion Condition="$(_RegistryVersion) != '' and $([System.Version]::Parse($(_RegistryVersion))) > $([System.Version]::Parse($(DefaultWindowsSDKVersion)))">$(_RegistryVersion)</DefaultWindowsSDKVersion>\r
</PropertyGroup>\r
- \r
+\r
+ <Target Name="_CheckWindowsSDKFound" BeforeTargets="_CheckWindowsSDKInstalled" Condition="$(_RegistryVersion) == ''">\r
+ <PropertyGroup>\r
+ <_Message>Failed to locate a Windows SDK installation.</_Message>\r
+ <_Message>$(_Message) If the build fails, please use the Visual Studio Installer to install the Windows SDK.</_Message>\r
+ <_Message>$(_Message) (Ignore the version number specified in the error message and select the latest.)</_Message>\r
+ </PropertyGroup>\r
+ <Warning Text="$(_Message)" />\r
+ </Target>\r
+\r
<PropertyGroup Condition="$(WindowsTargetPlatformVersion) == ''">\r
<WindowsTargetPlatformVersion>$(DefaultWindowsSDKVersion)</WindowsTargetPlatformVersion>\r
</PropertyGroup>\r
<ReleaseLevelNumber Condition="$(_ReleaseLevel) == 'b'">11</ReleaseLevelNumber>\r
<ReleaseLevelNumber Condition="$(_ReleaseLevel) == 'rc'">12</ReleaseLevelNumber>\r
</PropertyGroup>\r
- \r
+\r
<PropertyGroup>\r
<PythonVersionNumber>$(MajorVersionNumber).$(MinorVersionNumber).$(MicroVersionNumber)</PythonVersionNumber>\r
<PythonVersion>$(MajorVersionNumber).$(MinorVersionNumber).$(MicroVersionNumber)$(ReleaseLevelName)</PythonVersion>\r
return _res;
}
-// invalid_import_from_targets: import_from_as_names ','
+// invalid_import_from_targets: import_from_as_names ',' NEWLINE
static void *
invalid_import_from_targets_rule(Parser *p)
{
}
void * _res = NULL;
int _mark = p->mark;
- { // import_from_as_names ','
+ { // import_from_as_names ',' NEWLINE
if (p->error_indicator) {
D(p->level--);
return NULL;
}
- D(fprintf(stderr, "%*c> invalid_import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_names ','"));
+ D(fprintf(stderr, "%*c> invalid_import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_names ',' NEWLINE"));
Token * _literal;
asdl_seq* import_from_as_names_var;
+ Token * newline_var;
if (
(import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
+ &&
+ (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ invalid_import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_names ','"));
+ D(fprintf(stderr, "%*c+ invalid_import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_names ',' NEWLINE"));
_res = RAISE_SYNTAX_ERROR ( "trailing comma not allowed without surrounding parentheses" );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s invalid_import_from_targets[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_names ','"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_names ',' NEWLINE"));
}
_res = NULL;
done:
/* Fix locations for the given node and its children.
`parent` is the enclosing node.
+ `expr_start` is the starting position of the expression (pointing to the open brace).
`n` is the node which locations are going to be fixed relative to parent.
`expr_str` is the child node's string representation, including braces.
*/
static bool
-fstring_find_expr_location(Token *parent, char *expr_str, int *p_lines, int *p_cols)
+fstring_find_expr_location(Token *parent, const char* expr_start, char *expr_str, int *p_lines, int *p_cols)
{
*p_lines = 0;
*p_cols = 0;
+ assert(expr_start != NULL && *expr_start == '{');
if (parent && parent->bytes) {
char *parent_str = PyBytes_AsString(parent->bytes);
if (!parent_str) {
return false;
}
- char *substr = strstr(parent_str, expr_str);
- if (substr) {
- // The following is needed, in order to correctly shift the column
- // offset, in the case that (disregarding any whitespace) a newline
- // immediately follows the opening curly brace of the fstring expression.
- bool newline_after_brace = 1;
- char *start = substr + 1;
- while (start && *start != '}' && *start != '\n') {
- if (*start != ' ' && *start != '\t' && *start != '\f') {
- newline_after_brace = 0;
- break;
- }
- start++;
+ // The following is needed, in order to correctly shift the column
+ // offset, in the case that (disregarding any whitespace) a newline
+ // immediately follows the opening curly brace of the fstring expression.
+ bool newline_after_brace = 1;
+ const char *start = expr_start + 1;
+ while (start && *start != '}' && *start != '\n') {
+ if (*start != ' ' && *start != '\t' && *start != '\f') {
+ newline_after_brace = 0;
+ break;
}
+ start++;
+ }
- // Account for the characters from the last newline character to our
- // left until the beginning of substr.
- if (!newline_after_brace) {
- start = substr;
- while (start > parent_str && *start != '\n') {
- start--;
- }
- *p_cols += (int)(substr - start);
+ // Account for the characters from the last newline character to our
+ // left until the beginning of expr_start.
+ if (!newline_after_brace) {
+ start = expr_start;
+ while (start > parent_str && *start != '\n') {
+ start--;
}
- /* adjust the start based on the number of newlines encountered
- before the f-string expression */
- for (char* p = parent_str; p < substr; p++) {
- if (*p == '\n') {
- (*p_lines)++;
- }
+ *p_cols += (int)(expr_start - start);
+ }
+ /* adjust the start based on the number of newlines encountered
+ before the f-string expression */
+ for (const char *p = parent_str; p < expr_start; p++) {
+ if (*p == '\n') {
+ (*p_lines)++;
}
}
}
len = expr_end - expr_start;
/* Allocate 3 extra bytes: open paren, close paren, null byte. */
- str = PyMem_Malloc(len + 3);
+ str = PyMem_Calloc(len + 3, sizeof(char));
if (str == NULL) {
PyErr_NoMemory();
return NULL;
// The call to fstring_find_expr_location is responsible for finding the column offset
// the generated AST nodes need to be shifted to the right, which is equal to the number
- // of the f-string characters before the expression starts. In order to correctly compute
- // this offset, strstr gets called in fstring_find_expr_location which only succeeds
- // if curly braces appear before and after the f-string expression (exactly like they do
- // in the f-string itself), hence the following lines.
- str[0] = '{';
+ // of the f-string characters before the expression starts.
memcpy(str+1, expr_start, len);
- str[len+1] = '}';
- str[len+2] = 0;
-
int lines, cols;
- if (!fstring_find_expr_location(t, str, &lines, &cols)) {
- PyMem_FREE(str);
+ if (!fstring_find_expr_location(t, expr_start-1, str+1, &lines, &cols)) {
+ PyMem_Free(str);
return NULL;
}
"Non-UTF-8 code starting with '\\x%.2x' "
"in file %U on line %i, "
"but no encoding declared; "
- "see http://python.org/dev/peps/pep-0263/ for details",
+ "see https://python.org/dev/peps/pep-0263/ for details",
badchar, tok->filename, tok->lineno + 1);
return error_ret(tok);
}
/* If this is a first successful replacement, create new_bases list and
copy previously encountered bases. */
if (!(new_bases = PyList_New(i))) {
+ Py_DECREF(new_base);
goto error;
}
for (j = 0; j < i; j++) {
}
j = PyList_GET_SIZE(new_bases);
if (PyList_SetSlice(new_bases, j, j, new_base) < 0) {
+ Py_DECREF(new_base);
goto error;
}
Py_DECREF(new_base);
builtin___build_class__(PyObject *self, PyObject *const *args, Py_ssize_t nargs,
PyObject *kwnames)
{
- PyObject *func, *name, *bases, *mkw, *meta, *winner, *prep, *ns, *orig_bases;
- PyObject *cls = NULL, *cell = NULL;
+ PyObject *func, *name, *winner, *prep;
+ PyObject *cls = NULL, *cell = NULL, *ns = NULL, *meta = NULL, *orig_bases = NULL;
+ PyObject *mkw = NULL, *bases = NULL;
int isclass = 0; /* initialize to prevent gcc warning */
if (nargs < 2) {
else {
mkw = _PyStack_AsDict(args + nargs, kwnames);
if (mkw == NULL) {
- Py_DECREF(bases);
- return NULL;
+ goto error;
}
meta = _PyDict_GetItemIdWithError(mkw, &PyId_metaclass);
if (meta != NULL) {
Py_INCREF(meta);
if (_PyDict_DelItemId(mkw, &PyId_metaclass) < 0) {
- Py_DECREF(meta);
- Py_DECREF(mkw);
- Py_DECREF(bases);
- return NULL;
+ goto error;
}
/* metaclass is explicitly given, check if it's indeed a class */
isclass = PyType_Check(meta);
}
else if (PyErr_Occurred()) {
- Py_DECREF(mkw);
- Py_DECREF(bases);
- return NULL;
+ goto error;
}
}
if (meta == NULL) {
winner = (PyObject *)_PyType_CalculateMetaclass((PyTypeObject *)meta,
bases);
if (winner == NULL) {
- Py_DECREF(meta);
- Py_XDECREF(mkw);
- Py_DECREF(bases);
- return NULL;
+ goto error;
}
if (winner != meta) {
Py_DECREF(meta);
Py_DECREF(prep);
}
if (ns == NULL) {
- Py_DECREF(meta);
- Py_XDECREF(mkw);
- Py_DECREF(bases);
- return NULL;
+ goto error;
}
if (!PyMapping_Check(ns)) {
PyErr_Format(PyExc_TypeError,
}
error:
Py_XDECREF(cell);
- Py_DECREF(ns);
- Py_DECREF(meta);
+ Py_XDECREF(ns);
+ Py_XDECREF(meta);
Py_XDECREF(mkw);
- Py_DECREF(bases);
if (bases != orig_bases) {
Py_DECREF(orig_bases);
}
+ Py_DECREF(bases);
return cls;
}
prtrace(PyThreadState *tstate, PyObject *v, const char *str)
{
printf("%s ", str);
+ PyObject *type, *value, *traceback;
+ PyErr_Fetch(&type, &value, &traceback);
if (PyObject_Print(v, stdout, 0) != 0) {
/* Don't know what else to do */
_PyErr_Clear(tstate);
}
printf("\n");
+ PyErr_Restore(type, value, traceback);
return 1;
}
#endif
/* Except block for __anext__ */
compiler_use_next_block(c, except);
+
+ /* We don't want to trace the END_ASYNC_FOR, so make sure
+ * that it has the same lineno as the following instruction. */
+ if (asdl_seq_LEN(s->v.For.orelse)) {
+ SET_LOC(c, (stmt_ty)asdl_seq_GET(s->v.For.orelse, 0));
+ }
ADDOP(c, END_ASYNC_FOR);
/* `else` block */
}
static PyObject*
-_PyErr_CreateException(PyObject *exception, PyObject *value)
+_PyErr_CreateException(PyObject *exception_type, PyObject *value)
{
+ PyObject *exc;
+
if (value == NULL || value == Py_None) {
- return _PyObject_CallNoArg(exception);
+ exc = _PyObject_CallNoArg(exception_type);
}
else if (PyTuple_Check(value)) {
- return PyObject_Call(exception, value, NULL);
+ exc = PyObject_Call(exception_type, value, NULL);
}
else {
- return PyObject_CallOneArg(exception, value);
+ exc = PyObject_CallOneArg(exception_type, value);
+ }
+
+ if (exc != NULL && !PyExceptionInstance_Check(exc)) {
+ PyErr_Format(PyExc_TypeError,
+ "calling %R should have returned an instance of "
+ "BaseException, not %s",
+ exception_type, Py_TYPE(exc)->tp_name);
+ Py_CLEAR(exc);
}
+
+ return exc;
}
void
value = fixed_value;
}
- /* Avoid reference cycles through the context chain.
+ /* Avoid creating new reference cycles through the
+ context chain, while taking care not to hang on
+ pre-existing ones.
This is O(chain length) but context chains are
usually very short. Sensitive readers may try
to inline the call to PyException_GetContext. */
if (exc_value != value) {
PyObject *o = exc_value, *context;
+ PyObject *slow_o = o; /* Floyd's cycle detection algo */
+ int slow_update_toggle = 0;
while ((context = PyException_GetContext(o))) {
Py_DECREF(context);
if (context == value) {
break;
}
o = context;
+ if (o == slow_o) {
+ /* pre-existing cycle - all exceptions on the
+ path were visited and checked. */
+ break;
+ }
+ if (slow_update_toggle) {
+ slow_o = PyException_GetContext(slow_o);
+ Py_DECREF(slow_o);
+ }
+ slow_update_toggle = !slow_update_toggle;
}
PyException_SetContext(value, exc_value);
}
return 0;
}
-#ifdef __linux__
+#ifdef O_PATH
if (errno == EBADF) {
- // On Linux, ioctl(FIOCLEX) will fail with EBADF for O_PATH file descriptors
- // Fall through to the fcntl() path
+ // bpo-44849: On Linux and FreeBSD, ioctl(FIOCLEX) fails with EBADF
+ // on O_PATH file descriptors. Fall through to the fcntl()
+ // implementation.
}
else
#endif
{
char buf[BUFSIZ];
WFILE wf;
+ if (PySys_Audit("marshal.dumps", "Oi", x, version) < 0) {
+ return; /* caller must check PyErr_Occurred() */
+ }
memset(&wf, 0, sizeof(wf));
wf.fp = fp;
wf.ptr = wf.buf = buf;
wf.end = wf.ptr + sizeof(buf);
wf.error = WFERR_OK;
wf.version = version;
- if (w_init_refs(&wf, version))
- return; /* caller mush check PyErr_Occurred() */
+ if (w_init_refs(&wf, version)) {
+ return; /* caller must check PyErr_Occurred() */
+ }
w_object(x, &wf);
w_clear_refs(&wf);
w_flush(&wf);
if (lnotab == NULL)
goto code_error;
- if (PySys_Audit("code.__new__", "OOOiiiiii",
- code, filename, name, argcount, posonlyargcount,
- kwonlyargcount, nlocals, stacksize, flags) < 0) {
- goto code_error;
- }
-
v = (PyObject *) PyCode_NewWithPosOnlyArgs(
argcount, posonlyargcount, kwonlyargcount,
nlocals, stacksize, flags,
fprintf(stderr, "XXX readobject called with exception set\n");
return NULL;
}
+ if (p->ptr && p->end) {
+ if (PySys_Audit("marshal.loads", "y#", p->ptr, (Py_ssize_t)(p->end - p->ptr)) < 0) {
+ return NULL;
+ }
+ } else if (p->fp || p->readable) {
+ if (PySys_Audit("marshal.load", NULL) < 0) {
+ return NULL;
+ }
+ }
v = r_object(p);
if (v == NULL && !PyErr_Occurred())
PyErr_SetString(PyExc_TypeError, "NULL object in marshal data for object");
rf.refs = PyList_New(0);
if (rf.refs == NULL)
return NULL;
- result = r_object(&rf);
+ result = read_object(&rf);
Py_DECREF(rf.refs);
if (rf.buf != NULL)
PyMem_FREE(rf.buf);
rf.refs = PyList_New(0);
if (rf.refs == NULL)
return NULL;
- result = r_object(&rf);
+ result = read_object(&rf);
Py_DECREF(rf.refs);
if (rf.buf != NULL)
PyMem_FREE(rf.buf);
{
WFILE wf;
+ if (PySys_Audit("marshal.dumps", "Oi", x, version) < 0) {
+ return NULL;
+ }
memset(&wf, 0, sizeof(wf));
wf.str = PyBytes_FromStringAndSize((char *)NULL, 50);
if (wf.str == NULL)
PUTS(fd, "Stack (most recent call first):\n");
}
- frame = PyThreadState_GetFrame(tstate);
+ // Use a borrowed reference. Avoid Py_INCREF/Py_DECREF, since this function
+ // can be called in a signal handler by the faulthandler module which must
+ // not modify Python objects.
+ frame = tstate->frame;
if (frame == NULL) {
PUTS(fd, "<no Python frame>\n");
return;
depth = 0;
while (1) {
if (MAX_FRAME_DEPTH <= depth) {
- Py_DECREF(frame);
PUTS(fd, " ...\n");
break;
}
if (!PyFrame_Check(frame)) {
- Py_DECREF(frame);
break;
}
dump_frame(fd, frame);
- PyFrameObject *back = PyFrame_GetBack(frame);
- Py_DECREF(frame);
+ PyFrameObject *back = frame->f_back;
if (back == NULL) {
break;
-This is Python version 3.9.6
+This is Python version 3.9.7
============================
.. image:: https://travis-ci.org/python/cpython.svg?branch=3.9
-------------------------
If you have a proposal to change Python, you may want to send an email to the
-comp.lang.python or `python-ideas`_ mailing lists for initial feedback. A
+`comp.lang.python`_ or `python-ideas`_ mailing lists for initial feedback. A
Python Enhancement Proposal (PEP) may be submitted if your idea gains ground.
All current PEPs, as well as guidelines for submitting a new PEP, are listed at
`python.org/dev/peps/ <https://www.python.org/dev/peps/>`_.
.. _python-ideas: https://mail.python.org/mailman/listinfo/python-ideas/
+.. _comp.lang.python: https://mail.python.org/mailman/listinfo/python-list
Release Schedule
---Guido van Rossum (home page: http://www.python.org/~guido/)
+--Guido van Rossum (home page: https://www.python.org/~guido/)
pygettext searches only for _() by default, but see the -k/--keyword flag
below for how to augment this.
- [1] http://www.python.org/workshops/1997-10/proceedings/loewis.html
- [2] http://www.gnu.org/software/gettext/gettext.html
+ [1] https://www.python.org/workshops/1997-10/proceedings/loewis.html
+ [2] https://www.gnu.org/software/gettext/gettext.html
NOTE: pygettext attempts to be option and feature compatible with GNU
xgettext where ever possible. However some options are still missing or are
by providing a unique URI for this property. It does not need to be an
active internet address. Defaults to $(ComputerName).
- Official releases use http://www.python.org/(architecture name)
+ Official releases use https://www.python.org/(architecture name)
/p:DownloadUrlBase=(any URI)
Specifies the base of a URL where missing parts of the installer layout
rem The following substitutions will be applied to the release URI:\r
rem Variable Description Example\r
rem {arch} architecture amd64, win32\r
-set RELEASE_URI=http://www.python.org/{arch}\r
+set RELEASE_URI=https://www.python.org/{arch}\r
\r
rem This is the URL that will be used to download installation files.\r
rem The files available from the default URL *will* conflict with your\r
Version="$(var.Version)"
IconSourceFile="..\..\..\PC\icons\setup.ico"
Manufacturer="!(loc.Manufacturer)"
- AboutUrl="http://www.python.org/"
+ AboutUrl="https://www.python.org/"
Compressed="no"
dep:ProviderKey="CPython-$(var.MajorVersionNumber).$(var.MinorVersionNumber)$(var.PyArchExt)$(var.PyTestExt)">
<BootstrapperApplication Id="PythonBA" SourceFile="$(var.BootstrapApp)">
<String Id="NoDowngrade">A newer version of !(loc.ProductName) is already installed.</String>
<String Id="IncorrectCore">An incorrect version of a prerequisite package is installed. Please uninstall any other versions of !(loc.ProductName) and try installing this again.</String>
<String Id="NoTargetDir">The TARGETDIR variable must be provided when invoking this installer.</String>
- <String Id="ManufacturerSupportUrl">http://www.python.org/</String>
+ <String Id="ManufacturerSupportUrl">https://www.python.org/</String>
</WixLocalization>
<String Id="ShortDescriptor">executable</String>
<String Id="ShortcutName">Python {{ShortVersion}} ({{Bitness}})</String>
<String Id="ShortcutDescription">Launches the !(loc.ProductName) interpreter.</String>
- <String Id="SupportUrl">http://www.python.org/</String>
+ <String Id="SupportUrl">https://www.python.org/</String>
</WixLocalization>
that intend to bundle Python should rebuild these modules with their\r
own URI to avoid conflicting with the official releases.\r
\r
- The official releases use "http://www.python.org/$(ArchName)"\r
+ The official releases use "https://www.python.org/$(ArchName)"\r
\r
This is not the same as the DownloadUrl property used in the bundle\r
projects.\r
]
OPENSSL_RECENT_VERSIONS = [
- "1.1.1k",
- # "3.0.0-alpha14"
+ "1.1.1l",
+ "3.0.0-beta1"
]
LIBRESSL_OLD_VERSIONS = [
help="Keep original sources for debugging."
)
-OPENSSL_FIPS_CNF = """\
-openssl_conf = openssl_init
-
-.include {self.install_dir}/ssl/fipsinstall.cnf
-# .include {self.install_dir}/ssl/openssl.cnf
-
-[openssl_init]
-providers = provider_sect
-
-[provider_sect]
-fips = fips_sect
-default = default_sect
-
-[default_sect]
-activate = 1
-"""
-
class AbstractBuilder(object):
library = None
log.info("Unpacking files to {}".format(self.build_dir))
tf.extractall(self.build_dir, members)
- def _build_src(self):
+ def _build_src(self, config_args=()):
"""Now build openssl"""
log.info("Running build in {}".format(self.build_dir))
cwd = self.build_dir
cmd = [
- "./config",
+ "./config", *config_args,
"shared", "--debug",
"--prefix={}".format(self.install_dir)
]
if self.version.startswith("3.0"):
self._post_install_300()
+ def _build_src(self, config_args=()):
+ if self.version.startswith("3.0"):
+ config_args += ("enable-fips",)
+ super()._build_src(config_args)
+
def _post_install_300(self):
# create ssl/ subdir with example configs
- self._subprocess_call(
- ["make", "-j1", "install_ssldirs"],
- cwd=self.build_dir
- )
# Install FIPS module
- # https://wiki.openssl.org/index.php/OpenSSL_3.0#Completing_the_installation_of_the_FIPS_Module
- fipsinstall_cnf = os.path.join(
- self.install_dir, "ssl", "fipsinstall.cnf"
- )
- openssl_fips_cnf = os.path.join(
- self.install_dir, "ssl", "openssl-fips.cnf"
- )
- fips_mod = os.path.join(self.lib_dir, "ossl-modules/fips.so")
self._subprocess_call(
- [
- self.openssl_cli, "fipsinstall",
- "-out", fipsinstall_cnf,
- "-module", fips_mod,
- # "-provider_name", "fips",
- # "-mac_name", "HMAC",
- # "-macopt", "digest:SHA256",
- # "-macopt", "hexkey:00",
- # "-section_name", "fips_sect"
- ]
+ ["make", "-j1", "install_ssldirs", "install_fips"],
+ cwd=self.build_dir
)
- with open(openssl_fips_cnf, "w") as f:
- f.write(OPENSSL_FIPS_CNF.format(self=self))
+
@property
def short_version(self):
"""Short version for OpenSSL download URL"""
"APIs, https://github.com/libressl-portable/portable/issues/381")
print()
+ if os.environ.get("PYTHONSTRICTEXTENSIONBUILD") and (self.failed or self.failed_on_import):
+ raise RuntimeError("Failed to build some stdlib modules")
+
def build_extension(self, ext):
if ext.name == '_ctypes':
# Python PEP-3118 (buffer protocol) test module
self.add(Extension('_testbuffer', ['_testbuffer.c']))
- # Test loading multiple modules from one compiled file (http://bugs.python.org/issue16421)
+ # Test loading multiple modules from one compiled file (https://bugs.python.org/issue16421)
self.add(Extension('_testimportmultiple', ['_testimportmultiple.c']))
# Test multi-phase extension module init (PEP 489)
# similar functionality (but slower of course) implemented in Python.
# Sleepycat^WOracle Berkeley DB interface.
- # http://www.oracle.com/database/berkeley-db/db/index.html
+ # https://www.oracle.com/database/technologies/related/berkeleydb.html
#
# This requires the Sleepycat^WOracle DB code. The supported versions
# are set below. Visit the URL above to download
'/usr/include/db3',
'/usr/local/include/db3',
'/opt/sfw/include/db3',
- # Fink defaults (http://fink.sourceforge.net/)
+ # Fink defaults (https://www.finkproject.org/)
'/sw/include/db4',
'/sw/include/db3',
]
db_inc_paths.append('/usr/local/include/db4%d' % x)
db_inc_paths.append('/pkg/db-4.%d/include' % x)
db_inc_paths.append('/opt/db-4.%d/include' % x)
- # MacPorts default (http://www.macports.org/)
+ # MacPorts default (https://www.macports.org/)
db_inc_paths.append('/opt/local/include/db4%d' % x)
# 3.x minor number specific paths
for x in gen_db_minor_ver_nums(3):
# if --enable-loadable-sqlite-extensions configure option is used.
if '--enable-loadable-sqlite-extensions' not in sysconfig.get_config_var("CONFIG_ARGS"):
sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
+ elif MACOS and sqlite_incdir == os.path.join(MACOS_SDK_ROOT, "usr/include"):
+ raise DistutilsError("System version of SQLite does not support loadable extensions")
if MACOS:
# In every directory on the search path search for a dynamic
# Workarounds for toolchain bugs:
if sysconfig.get_config_var('HAVE_IPA_PURE_CONST_BUG'):
# Some versions of gcc miscompile inline asm:
- # http://gcc.gnu.org/bugzilla/show_bug.cgi?id=46491
- # http://gcc.gnu.org/ml/gcc/2010-11/msg00366.html
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=46491
+ # https://gcc.gnu.org/ml/gcc/2010-11/msg00366.html
extra_compile_args.append('-fno-ipa-pure-const')
if sysconfig.get_config_var('HAVE_GLIBC_MEMMOVE_BUG'):
# _FORTIFY_SOURCE wrappers for memmove and bcopy are incorrect:
- # http://sourceware.org/ml/libc-alpha/2010-12/msg00009.html
+ # https://sourceware.org/ml/libc-alpha/2010-12/msg00009.html
undef_macros.append('_FORTIFY_SOURCE')
# Uncomment for extra functionality:
setup(# PyPI Metadata (PEP 301)
name = "Python",
version = sys.version.split()[0],
- url = "http://www.python.org/%d.%d" % sys.version_info[:2],
+ url = "https://www.python.org/%d.%d" % sys.version_info[:2],
maintainer = "Guido van Rossum and the Python community",
maintainer_email = "python-dev@python.org",
description = "A high-level object-oriented programming language",