From: JinWang An Date: Mon, 27 Mar 2023 08:02:50 +0000 (+0900) Subject: Imported Upstream version 62.4.0 X-Git-Tag: upstream/62.4.0^0 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=9cee562961a33f9e671edce31ae5c4693b54807c;p=platform%2Fupstream%2Fpython-setuptools.git Imported Upstream version 62.4.0 --- diff --git a/.bumpversion.cfg b/.bumpversion.cfg index fb32783..43f8594 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 62.3.4 +current_version = 62.4.0 commit = True tag = True diff --git a/.coveragerc b/.coveragerc index 3153808..aaecd68 100644 --- a/.coveragerc +++ b/.coveragerc @@ -2,6 +2,9 @@ omit = # leading `*/` for pytest-dev/pytest-cov#456 */.tox/* + */pep517-build-env-* + + # local */_validate_pyproject/* # generated code, tested in `validate-pyproject` [report] diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 092c0dc..4a68822 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -16,11 +16,13 @@ jobs: distutils: - local python: + # Build on pre-releases until stable, then stable releases. + # actions/setup-python#213 + - ~3.7.0-0 + - ~3.10.0-0 + # disabled due to #3365 + # - ~3.11.0-0 - pypy-3.7 - - 3.7 - - 3.8 - - 3.9 - - "3.10" platform: - ubuntu-latest - macos-latest @@ -34,9 +36,9 @@ jobs: SETUPTOOLS_USE_DISTUTILS: ${{ matrix.distutils }} timeout-minutes: 75 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python }} - uses: actions/cache@v3 @@ -66,6 +68,20 @@ jobs: ${{ runner.os }}, ${{ matrix.python }} + check: # This job does nothing and is only used for the branch protection + if: always() + + needs: + - test + + runs-on: ubuntu-latest + + steps: + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@release/v1 + with: + jobs: ${{ toJSON(needs) }} + test_cygwin: strategy: matrix: @@ -121,14 +137,17 @@ jobs: run: tox -e integration release: - needs: [test, test_cygwin, integration-test] + needs: + - check + - test_cygwin + - integration-test if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') runs-on: ubuntu-latest timeout-minutes: 75 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install tox diff --git a/CHANGES.rst b/CHANGES.rst index 70c9897..61e7965 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,36 @@ +v62.4.0 +------- + + +Changes +^^^^^^^ +* #3256: Added setuptools.command.build command to match distutils.command.build -- by :user:`isuruf` +* #3366: Merge with pypa/distutils@75ed79d including reformat using black, fix for Cygwin support (pypa/distutils#139), and improved support for cross compiling (pypa/distutils#144 and pypa/distutils#145). + +Documentation changes +^^^^^^^^^^^^^^^^^^^^^ +* #3355: Changes to the User Guide's Entry Points page -- by :user:`codeandfire` +* #3361: Further minor corrections to the Entry Points page -- by :user:`codeandfire` +* #3363: Rework some documentation pages to de-emphasize ``distutils`` and the history + of packaging in the Python ecosystem. The focus of these changes is to make the + documentation easier to read for new users. +* #3364: Update documentation about dependency management, removing mention to + the deprecated ``dependency_links`` and adding some small improvements. 
+* #3367: Extracted text about automatic resource extraction and the zip-safe flag + from ``userguide/miscellaneous`` to ``deprecated/resource_extraction`` and + ``deprecated/zip_safe``. + + Extracted text about additional metadata files from + ``userguide/miscellaneous`` into the existing ``userguide/extension`` + document. + + Updated ``userguide/extension`` to better reflect the status of the + setuptools project. + + Removed ``userguide/functionalities_rewrite`` (a virtually empty part of the + docs). + + v62.3.4 ------- @@ -80,10 +113,13 @@ Changes Documentation changes ^^^^^^^^^^^^^^^^^^^^^ -* #3307: Added introduction to references/keywords - Added deprecation tags to test kwargs - Moved userguide/keywords to deprecated section - Clarified in deprecated doc what keywords came from distutils and which were added or changed by setuptools +* #3307: Added introduction to references/keywords. + + Added deprecation tags to test kwargs. + + Moved userguide/keywords to deprecated section. + + Clarified in deprecated doc what keywords came from distutils and which were added or changed by setuptools. Misc ^^^^ diff --git a/docs/build_meta.rst b/docs/build_meta.rst index dc4c2f3..fab42b0 100644 --- a/docs/build_meta.rst +++ b/docs/build_meta.rst @@ -95,6 +95,9 @@ or:: $ pip install dist/meowpkg-0.0.1.tar.gz + +.. _backend-wrapper: + Dynamic build dependencies and other ``build_meta`` tweaks ---------------------------------------------------------- diff --git a/docs/conf.py b/docs/conf.py index 9028691..159eedc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -10,12 +10,20 @@ link_files = { ), replace=[ dict( - pattern=r'(?\d+)', - url='{package_url}/pull/{pull}', + pattern=r'(Issue #|\B#)(?P\d+)', + url='{package_url}/issues/{issue}', ), dict( - pattern=r'(?\d+)', - url='{package_url}/issues/{issue}', + pattern=r'(?m:^((?Pv?\d+(\.\d+){1,2}))\n[-=]+\n)', + with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n', + ), + dict( + pattern=r'PEP[- ](?P\d+)', + url='https://peps.python.org/pep-{pep_number:0>4}/', + ), + dict( + pattern=r'(?\d+)', + url='{package_url}/pull/{pull}', ), dict( pattern=r'BB Pull Request ?#(?P\d+)', @@ -57,10 +65,6 @@ link_files = { pattern=r'[Pp]ackaging (?P\d+(\.\d+)+)', url='{GH}/pypa/packaging/blob/{packaging_ver}/CHANGELOG.rst', ), - dict( - pattern=r'(?\d+)', - url='https://www.python.org/dev/peps/pep-{pep_number:0>4}/', - ), dict( pattern=r'setuptools_svn #(?P\d+)', url='{GH}/jaraco/setuptools_svn/issues/{setuptools_svn}', @@ -73,10 +77,6 @@ link_files = { pattern=r'pypa/(?P[\-\.\w]+)@(?P[\da-f]+)', url='{GH}/pypa/{commit_repo}/commit/{commit_number}', ), - dict( - pattern=r'^(?m)((?Pv?\d+(\.\d+){1,2}))\n[-=]+\n', - with_scm='{text}\n{rev[timestamp]:%d %b %Y}\n', - ), ], ), } @@ -178,7 +178,6 @@ nitpick_ignore = [ # Allow linking objects on other Sphinx sites seamlessly: intersphinx_mapping.update( - python2=('https://docs.python.org/2', None), python=('https://docs.python.org/3', None), ) diff --git a/docs/deprecated/dependency_links.rst b/docs/deprecated/dependency_links.rst new file mode 100644 index 0000000..70e1a78 --- /dev/null +++ b/docs/deprecated/dependency_links.rst @@ -0,0 +1,77 @@ +Specifying dependencies that aren't in PyPI via ``dependency_links`` +==================================================================== + +.. warning:: + Dependency links support has been dropped by pip starting with version + 19.0 (released 2019-01-22). 
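For comparison, the role that ``dependency_links`` used to play is nowadays usually covered by :pep:`508` direct references, which :pypi:`pip` can resolve straight from a URL. A minimal sketch (the project name and repository URL below are placeholders, not taken from this documentation):

.. code-block:: python

    from setuptools import setup

    setup(
        name="my-project",
        install_requires=[
            # pip installs this requirement directly from the given VCS URL
            "somepackage @ git+https://example.com/somepackage.git@v1.0",
        ],
    )

Note, however, that public indexes such as PyPI may reject distributions whose dependencies are declared with direct URLs.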
+ +If your project depends on packages that don't exist on PyPI, you *may* still be +able to depend on them if they are available for download as: + +- an egg, in the standard distutils ``sdist`` format, +- a single ``.py`` file, or +- a VCS repository (Subversion, Mercurial, or Git). + +You need to add some URLs to the ``dependency_links`` argument to ``setup()``. + +The URLs must be either: + +1. direct download URLs, +2. the URLs of web pages that contain direct download links, or +3. the repository's URL + +In general, it's better to link to web pages, because it is usually less +complex to update a web page than to release a new version of your project. +You can also use a SourceForge ``showfiles.php`` link in the case where a +package you depend on is distributed via SourceForge. + +If you depend on a package that's distributed as a single ``.py`` file, you +must include an ``"#egg=project-version"`` suffix to the URL, to give a project +name and version number. (Be sure to escape any dashes in the name or version +by replacing them with underscores.) EasyInstall will recognize this suffix +and automatically create a trivial ``setup.py`` to wrap the single ``.py`` file +as an egg. + +In the case of a VCS checkout, you should also append ``#egg=project-version`` +in order to identify for what package that checkout should be used. You can +append ``@REV`` to the URL's path (before the fragment) to specify a revision. +Additionally, you can also force the VCS being used by prepending the URL with +a certain prefix. Currently available are: + +- ``svn+URL`` for Subversion, +- ``git+URL`` for Git, and +- ``hg+URL`` for Mercurial + +A more complete example would be: + + ``vcs+proto://host/path@revision#egg=project-version`` + +Be careful with the version. It should match the one inside the project files. +If you want to disregard the version, you have to omit it both in the +``requires`` and in the URL's fragment. + +This will do a checkout (or a clone, in Git and Mercurial parlance) to a +temporary folder and run ``setup.py bdist_egg``. + +The ``dependency_links`` option takes the form of a list of URL strings. For +example, this will cause a search of the specified page for eggs or source +distributions, if the package's dependencies aren't already installed: + +.. tab:: setup.cfg + + .. code-block:: ini + + [options] + #... + dependency_links = http://peak.telecommunity.com/snapshots/ + +.. tab:: setup.py + + .. code-block:: python + + setup( + ..., + dependency_links=[ + "http://peak.telecommunity.com/snapshots/", + ], + ) diff --git a/docs/deprecated/index.rst b/docs/deprecated/index.rst index 8169b3b..ea9069e 100644 --- a/docs/deprecated/index.rst +++ b/docs/deprecated/index.rst @@ -14,8 +14,12 @@ objectives. :maxdepth: 1 changed_keywords + dependency_links python_eggs easy_install + zip_safe + resource_extraction distutils/index distutils-legacy functionalities + running_commands diff --git a/docs/deprecated/resource_extraction.rst b/docs/deprecated/resource_extraction.rst new file mode 100644 index 0000000..c481e33 --- /dev/null +++ b/docs/deprecated/resource_extraction.rst @@ -0,0 +1,54 @@ +.. _Automatic Resource Extraction: + +Automatic Resource Extraction +============================= + +In a modern setup, Python packages are usually installed as directories, +and all the files can be found on deterministic locations on the disk. +This means that most of the tools expect package resources to be "real" files. 
+ +There are a few occasions, however, when packages are loaded in a different way +(e.g., from a zip file), which is incompatible with the assumptions mentioned above. +Moreover, a package developer may also include non-extension native libraries or other files that +C extensions may expect to be able to access. + +In these scenarios, the use of :mod:`importlib.resources` is recommended. + +Old implementations (prior to the advent of :mod:`importlib.resources`) and +long-living projects, however, may still rely on the library ``pkg_resources`` +to access these files. + +If you have to support such systems, or want to provide backward compatibility +for ``pkg_resources``, you may need to add a special configuration +to ``setuptools`` when packaging a project. +This can be done by listing as ``eager_resources`` (argument to ``setup()`` +in ``setup.py`` or field in ``setup.cfg``) all the files that need to be +extracted together, whenever a C extension in the project is imported. + +This is especially important if your project includes shared libraries *other* +than ``distutils``/``setuptools``-built C extensions, and those shared libraries use file +extensions other than ``.dll``, ``.so``, or ``.dylib``, which are the +extensions that setuptools 0.6a8 and higher automatically detects as shared +libraries and adds to the ``native_libs.txt`` file for you. Any shared +libraries whose names do not end with one of those extensions should be listed +as ``eager_resources``, because they need to be present in the filesystem when +the C extensions that link to them are used. + +The ``pkg_resources`` runtime for compressed packages will automatically +extract *all* C extensions and ``eager_resources`` at the same time, whenever +*any* C extension or eager resource is requested via the ``resource_filename()`` +API. (C extensions are imported using ``resource_filename()`` internally.) +This ensures that C extensions will see all of the "real" files that they +expect to see. + +Note also that you can list directory resource names in ``eager_resources`` as +well, in which case the directory's contents (including subdirectories) will be +extracted whenever any C extension or eager resource is requested. + +Please note that if you're not sure whether you need to use this argument, you +don't! It's really intended to support projects with lots of non-Python +dependencies and as a last resort for crufty projects that can't otherwise +handle being compressed. If your package is pure Python, Python plus data +files, or Python plus C, you really don't need this. You've got to be using +either C or an external program that needs "real" files in your project before +there's any possibility of ``eager_resources`` being relevant to your project.
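To illustrate the :mod:`importlib.resources` approach recommended above, here is a minimal sketch of reading a data file bundled inside a package; the package and file names are placeholders, and ``files()`` requires Python 3.9+ (or the :pypi:`importlib_resources` backport on older interpreters):

.. code-block:: python

    from importlib.resources import files  # or: from importlib_resources import files

    # Works whether "mypackage" is installed as a directory or imported from a zip.
    text = files("mypackage").joinpath("data.txt").read_text(encoding="utf-8")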
diff --git a/docs/deprecated/running_commands.rst b/docs/deprecated/running_commands.rst new file mode 100644 index 0000000..8d4ca93 --- /dev/null +++ b/docs/deprecated/running_commands.rst @@ -0,0 +1,23 @@ +Running ``setuptools`` commands +=============================== + +Historically, ``setuptools`` allowed running commands via a ``setup.py`` script +at the root of a Python project, as indicated in the examples below:: + + python setup.py --help + python setup.py --help-commands + python setup.py --version + python setup.py sdist + python setup.py bdist_wheel + +You could also run commands in other circumstances: + +* ``setuptools`` projects without ``setup.py`` (e.g., ``setup.cfg``-only):: + + python -c "import setuptools; setup()" --help + +* ``distutils`` projects (with a ``setup.py`` importing ``distutils``):: + + python -c "import setuptools; with open('setup.py') as f: exec(compile(f.read(), 'setup.py', 'exec'))" develop + +That is, you can simply list the normal setup commands and options following the quoted part. diff --git a/docs/deprecated/zip_safe.rst b/docs/deprecated/zip_safe.rst new file mode 100644 index 0000000..08a1333 --- /dev/null +++ b/docs/deprecated/zip_safe.rst @@ -0,0 +1,74 @@ +Understanding the ``zip_safe`` flag +=================================== + +The ``zip_safe`` flag is a ``setuptools`` configuration mainly associated +with the ``egg`` distribution format +(which got replaced in the ecosystem by the newer ``wheel`` format) and the +``easy_install`` command (deprecated in ``setuptools`` v58.3.0). + +It is very unlikely that the values of ``zip_safe`` will affect modern +deployments that use :pypi:`pip` for installing packages. +Moreover, new users of ``setuptools`` should not attempt to create egg files +using the deprecated ``build_egg`` command. +Therefore, this flag is considered **obsolete**. + +This document, however, describes what was the historical motivation behind +this flag, and how it was used. + +Historical Motivation +--------------------- + +For some use cases (such as bundling as part of a larger application), Python +packages may be run directly from a zip file. +Not all packages, however, are capable of running in compressed form, because +they may expect to be able to access either source code or data files as +normal operating system files. + +In the past, ``setuptools`` would install a project distributed +as a zipfile or a directory (via the ``easy_install`` command or +``python setup.py install``), +the default choice being determined by the project's ``zip_safe`` flag. + +How the ``zip_safe`` flag was used? +----------------------------------- + +To set this flag, a developer would pass a boolean value for the ``zip_safe`` argument to the +``setup()`` function, or omit it. When omitted, the ``bdist_egg`` +command would analyze the project's contents to see if it could detect any +conditions that preventing the project from working in a zipfile. + +This was extremely conservative: ``bdist_egg`` would consider the +project unsafe if it contained any C extensions or datafiles whatsoever. This +does *not* mean that the project couldn't or wouldn't work as a zipfile! It just +means that the ``bdist_egg`` authors were not yet comfortable asserting that +the project *would* work. If the project did not contain any C or data files, and did not +attempt to perform ``__file__`` or ``__path__`` introspection or source code manipulation, then +there was an extremely solid chance the project will work when installed as a +zipfile. 
(And if the project used ``pkg_resources`` for all its data file +access, then C extensions and other data files shouldn't be a problem at all. +See the :ref:`Accessing Data Files at Runtime` section for more information.) + +The developer could manually set ``zip_safe`` to ``True`` to perform tests, +or to override the default behaviour (after checking all the warnings and +understanding the implications), this would allow ``setuptools`` to install the +project as a zip file. Alternatively, by setting ``zip_safe`` to ``False``, +developers could force ``setuptools`` to always install the project as a +directory. + +Modern ways of loading packages from zip files +---------------------------------------------- + +Currently, popular Python package installers (such as :pypi:`pip`) and package +indexes (such as PyPI_) consider that distribution packages are always +installed as a directory. +It is however still possible to load packages from zip files added to +:obj:`sys.path`, thanks to the :mod:`zipimport` module +and the :mod:`importlib` machinery provided by Python standard library. + +When working with modules loaded from a zip file, it is important to keep in +mind that values of ``__file__`` and ``__path__`` might not work as expected. +Please check the documentation for :mod:`importlib.resources`, if file +locations are important for your use case. + + +.. _PyPI: https://pypi.org diff --git a/docs/index.rst b/docs/index.rst index 0f52c36..8328f87 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -7,6 +7,10 @@ Documentation Setuptools is a fully-featured, actively-maintained, and stable library designed to facilitate packaging Python projects. +It helps developers to easily share reusable code (in the form of a library) +and programs (e.g., CLI/GUI tools implemented in Python), that can be installed +with :pypi:`pip` and uploaded to `PyPI `_. + .. toctree:: :maxdepth: 1 :hidden: diff --git a/docs/userguide/dependency_management.rst b/docs/userguide/dependency_management.rst index d507a58..c7f1e05 100644 --- a/docs/userguide/dependency_management.rst +++ b/docs/userguide/dependency_management.rst @@ -6,24 +6,23 @@ There are three types of dependency styles offered by setuptools: 1) build system requirement, 2) required dependency and 3) optional dependency. -.. Note:: - Packages that are added to dependency can be optionally specified with the - version by following `PEP 440 `_ +.. attention:: + Each dependency, regardless of type, needs to be specified according to :pep:`508`. + This allows adding version :pep:`range restrictions <440#version-specifiers>` + and :ref:`environment markers `. + Please note however that public package indexes, such as `PyPI`_ + might not accept packages that declare dependencies using + :pep:`direct URLs <440#direct-references>`. Build system requirement ======================== -Package requirement -------------------- After organizing all the scripts and files and getting ready for packaging, -there needs to be a way to tell Python what programs it needs to actually -do the packaging (in our case, ``setuptools`` of course). Usually, -you also need the ``wheel`` package as well since it is recommended that you -upload a ``.whl`` file to PyPI alongside your ``.tar.gz`` file. 
Unlike the -other two types of dependency keyword, this one is specified in your -``pyproject.toml`` file (if you have forgot what this is, go to -:doc:`quickstart` or (WIP)): +there needs to be a way to specify what programs and libraries are actually needed +do the packaging (in our case, ``setuptools`` of course). +This needs to be specified in your ``pyproject.toml`` file +(if you have forgot what this is, go to :doc:`/userguide/quickstart` or :doc:`/build_meta`): .. code-block:: ini @@ -31,11 +30,16 @@ other two types of dependency keyword, this one is specified in your requires = ["setuptools"] #... +Please note that you should also include here any other ``setuptools`` plugin +(e.g., :pypi:`setuptools-scm`, :pypi:`setuptools-golang`, :pypi:`setuptools-rust`) +or build-time dependency (e.g., :pypi:`Cython`, :pypi:`cppy`, :pypi:`pybind11`). + .. note:: - This used to be accomplished with the ``setup_requires`` keyword but is - now considered deprecated in favor of the PEP 517 style described above. + In previous versions of ``setuptools``, + this used to be accomplished with the ``setup_requires`` keyword but is + now considered deprecated in favor of the :pep:`517` style described above. To peek into how this legacy keyword is used, consult our :doc:`guide on - deprecated practice (WIP) <../deprecated/index>` + deprecated practice (WIP) `. .. _Declaring Dependencies: @@ -82,12 +86,14 @@ finesse to it, let's start with a simple example. # ... -When your project is installed (e.g. using pip), all of the dependencies not -already installed will be located (via PyPI), downloaded, built (if necessary), +When your project is installed (e.g., using :pypi:`pip`), all of the dependencies not +already installed will be located (via `PyPI`_), downloaded, built (if necessary), and installed and 2) Any scripts in your project will be installed with wrappers that verify the availability of the specified dependencies at runtime. +.. _environment-markers: + Platform specific dependencies ------------------------------ Setuptools offers the capability to evaluate certain conditions before blindly @@ -165,101 +171,22 @@ and only install it if the user is using a Windows operating system: # ... The environmental markers that may be used for testing platform types are -detailed in `PEP 508 `_. - - -Dependencies that aren't in PyPI --------------------------------- -.. warning:: - Dependency links support has been dropped by pip starting with version - 19.0 (released 2019-01-22). - -If your project depends on packages that don't exist on PyPI, you may still be -able to depend on them, as long as they are available for download as: - -- an egg, in the standard distutils ``sdist`` format, -- a single ``.py`` file, or -- a VCS repository (Subversion, Mercurial, or Git). - -You just need to add some URLs to the ``dependency_links`` argument to -``setup()``. - -The URLs must be either: - -1. direct download URLs, -2. the URLs of web pages that contain direct download links, or -3. the repository's URL - -In general, it's better to link to web pages, because it is usually less -complex to update a web page than to release a new version of your project. -You can also use a SourceForge ``showfiles.php`` link in the case where a -package you depend on is distributed via SourceForge. - -If you depend on a package that's distributed as a single ``.py`` file, you -must include an ``"#egg=project-version"`` suffix to the URL, to give a project -name and version number. 
(Be sure to escape any dashes in the name or version -by replacing them with underscores.) EasyInstall will recognize this suffix -and automatically create a trivial ``setup.py`` to wrap the single ``.py`` file -as an egg. - -In the case of a VCS checkout, you should also append ``#egg=project-version`` -in order to identify for what package that checkout should be used. You can -append ``@REV`` to the URL's path (before the fragment) to specify a revision. -Additionally, you can also force the VCS being used by prepending the URL with -a certain prefix. Currently available are: - -- ``svn+URL`` for Subversion, -- ``git+URL`` for Git, and -- ``hg+URL`` for Mercurial - -A more complete example would be: - - ``vcs+proto://host/path@revision#egg=project-version`` - -Be careful with the version. It should match the one inside the project files. -If you want to disregard the version, you have to omit it both in the -``requires`` and in the URL's fragment. - -This will do a checkout (or a clone, in Git and Mercurial parlance) to a -temporary folder and run ``setup.py bdist_egg``. - -The ``dependency_links`` option takes the form of a list of URL strings. For -example, this will cause a search of the specified page for eggs or source -distributions, if the package's dependencies aren't already installed: - -.. tab:: setup.cfg - - .. code-block:: ini - - [options] - #... - dependency_links = http://peak.telecommunity.com/snapshots/ - -.. tab:: setup.py +detailed in :pep:`508`. - .. code-block:: python - - setup( - ..., - dependency_links=[ - "http://peak.telecommunity.com/snapshots/", - ], - ) +.. seealso:: + If environment markers are not enough an specific use case, + you can also consider creating a :ref:`backend wrapper ` + to implement custom detection logic. Optional dependencies ===================== -Setuptools allows you to declare dependencies that only get installed under -specific circumstances. These dependencies are specified with the ``extras_require`` -keyword and are only installed if another package depends on it (either -directly or indirectly). This makes it convenient to declare dependencies for -ancillary functions such as "tests" and "docs". - -.. note:: - ``tests_require`` is now deprecated +Setuptools allows you to declare dependencies that are not installed by default. +This effectively means that you can create a "variant" of your package with a +set of extra functionalities. -For example, Package-A offers optional PDF support and requires two other -dependencies for it to work: +For example, let's consider a ``Package-A`` that offers +optional PDF support and requires two other dependencies for it to work: .. tab:: setup.cfg @@ -269,7 +196,9 @@ dependencies for it to work: name = Package-A [options.extras_require] - PDF = ReportLab>=1.2; RXP + PDF = + ReportLab>=1.2 + RXP .. tab:: setup.py @@ -277,7 +206,7 @@ dependencies for it to work: .. code-block:: python setup( - name="Project-A", + name="Package-A", ..., extras_require={ "PDF": ["ReportLab>=1.2", "RXP"], @@ -288,15 +217,23 @@ dependencies for it to work: .. code-block:: toml + [project] + name = "Package-A" # ... [project.optional-dependencies] PDF = ["ReportLab>=1.2", "RXP"] -The name ``PDF`` is an arbitrary identifier of such a list of dependencies, to +.. sidebar:: + + .. tip:: + It is also convenient to declare optional requirements for + ancillary tasks such as running tests and or building docs. 
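As an illustration of that tip, a hypothetical project could declare such ancillary extras as follows (the group names and requirements are placeholders only):

.. code-block:: python

    from setuptools import setup

    setup(
        name="Package-A",
        extras_require={
            # installed with e.g. ``pip install Package-A[testing]``
            "testing": ["pytest"],
            "docs": ["sphinx"],
        },
    )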
+ +The name ``PDF`` is an arbitrary :pep:`identifier <685>` of such a list of dependencies, to which other components can refer and have them installed. A use case for this approach is that other package can use this "extra" for their -own dependencies. For example, if "Project-B" needs "project A" with PDF support +own dependencies. For example, if ``Package-B`` needs ``Package-B`` with PDF support installed, it might declare the dependency like this: .. tab:: setup.cfg @@ -304,21 +241,21 @@ installed, it might declare the dependency like this: .. code-block:: ini [metadata] - name = Project-B + name = Package-B #... [options] #... install_requires = - Project-A[PDF] + Package-A[PDF] .. tab:: setup.py .. code-block:: python setup( - name="Project-B", - install_requires=["Project-A[PDF]"], + name="Package-B", + install_requires=["Package-A[PDF]"], ..., ) @@ -327,71 +264,71 @@ installed, it might declare the dependency like this: .. code-block:: toml [project] - name = "Project-B" + name = "Package-B" # ... dependencies = [ - "Project-A[PDF]" + "Package-A[PDF]" ] -This will cause ReportLab to be installed along with project A, if project B is -installed -- even if project A was already installed. In this way, a project +This will cause ``ReportLab`` to be installed along with ``Package-A``, if ``Package-B`` is +installed -- even if ``Package-A`` was already installed. In this way, a project can encapsulate groups of optional "downstream dependencies" under a feature name, so that packages that depend on it don't have to know what the downstream -dependencies are. If a later version of Project A builds in PDF support and -no longer needs ReportLab, or if it ends up needing other dependencies besides -ReportLab in order to provide PDF support, Project B's setup information does +dependencies are. If a later version of ``Package-A`` builds in PDF support and +no longer needs ``ReportLab``, or if it ends up needing other dependencies besides +``ReportLab`` in order to provide PDF support, ``Package-B``'s setup information does not need to change, but the right packages will still be installed if needed. -.. note:: +.. tip:: Best practice: if a project ends up no longer needing any other packages to support a feature, it should keep an empty requirements list for that feature in its ``extras_require`` argument, so that packages depending on that feature don't break (due to an invalid feature name). -Historically ``setuptools`` also used to support extra dependencies in console -scripts, for example: +.. warning:: + Historically ``setuptools`` also used to support extra dependencies in console + scripts, for example: -.. tab:: setup.cfg + .. tab:: setup.cfg - .. code-block:: ini + .. code-block:: ini - [metadata] - name = Project A - #... + [metadata] + name = Package-A + #... - [options] - #... - entry_points= - [console_scripts] - rst2pdf = project_a.tools.pdfgen [PDF] - rst2html = project_a.tools.htmlgen + [options] + #... + entry_points= + [console_scripts] + rst2pdf = project_a.tools.pdfgen [PDF] + rst2html = project_a.tools.htmlgen -.. tab:: setup.py + .. tab:: setup.py - .. code-block:: python + .. 
code-block:: python - setup( - name="Project-A", - ..., - entry_points={ - "console_scripts": [ - "rst2pdf = project_a.tools.pdfgen [PDF]", - "rst2html = project_a.tools.htmlgen", - ], - }, - ) + setup( + name="Package-A", + ..., + entry_points={ + "console_scripts": [ + "rst2pdf = project_a.tools.pdfgen [PDF]", + "rst2html = project_a.tools.htmlgen", + ], + }, + ) -This syntax indicates that the entry point (in this case a console script) -is only valid when the PDF extra is installed. It is up to the installer -to determine how to handle the situation where PDF was not indicated -(e.g. omit the console script, provide a warning when attempting to load -the entry point, assume the extras are present and let the implementation -fail later). + This syntax indicates that the entry point (in this case a console script) + is only valid when the PDF extra is installed. It is up to the installer + to determine how to handle the situation where PDF was not indicated + (e.g., omit the console script, provide a warning when attempting to load + the entry point, assume the extras are present and let the implementation + fail later). -.. warning:: - ``pip`` and other tools might not support this use case for extra - dependencies, therefore this practice is considered **deprecated**. - See :doc:`PyPUG:specifications/entry-points`. + **However**, ``pip`` and other tools might not support this use case for extra + dependencies, therefore this practice is considered **deprecated**. + See :doc:`PyPUG:specifications/entry-points`. Python requirement @@ -404,7 +341,7 @@ This can be configured as shown in the example below. .. code-block:: ini [metadata] - name = Project-B + name = Package-B #... [options] @@ -416,7 +353,7 @@ This can be configured as shown in the example below. .. code-block:: python setup( - name="Project-B", + name="Package-B", python_requires=">=3.6", ..., ) @@ -427,7 +364,7 @@ This can be configured as shown in the example below. .. code-block:: toml [project] - name = "Project-B" + name = "Package-B" requires-python = ">=3.6" # ... @@ -441,3 +378,6 @@ This can be configured as shown in the example below. options via the ``[project]`` and ``[tool.setuptools]`` tables is still experimental and might change in future releases. See :doc:`/userguide/pyproject_config`. + + +.. _PyPI: https://pypi.org diff --git a/docs/userguide/development_mode.rst b/docs/userguide/development_mode.rst index 90bc567..fafcc52 100644 --- a/docs/userguide/development_mode.rst +++ b/docs/userguide/development_mode.rst @@ -1,13 +1,13 @@ "Development Mode" ================== -Under normal circumstances, the ``distutils`` assume that you are going to +Under normal circumstances, the ``setuptools`` assume that you are going to build a distribution of your project, not use it in its "raw" or "unbuilt" -form. However, if you were to use the ``distutils`` to build a distribution, +form. However, if you were to use the ``setuptools`` to build a distribution, you would have to rebuild and reinstall your project every time you made a change to it during development. -Another problem that sometimes comes up with the ``distutils`` is that you may +Another problem that sometimes comes is that you may need to do development on two related projects at the same time. You may need to put both projects' packages in the same directory to run them, but need to keep them separate for revision control purposes. How can you do this? @@ -15,38 +15,20 @@ keep them separate for revision control purposes. How can you do this? 
Setuptools allows you to deploy your projects for use in a common directory or staging area, but without copying any files. Thus, you can edit each project's code in its checkout directory, and only need to run build commands when you -change a project's C extensions or similarly compiled files. You can even -deploy a project into another project's checkout directory, if that's your -preferred way of working (as opposed to using a common independent staging area -or the site-packages directory). - -To do this, use the ``setup.py develop`` command. It works very similarly to -``setup.py install``, except that it doesn't actually install anything. -Instead, it creates a special ``.egg-link`` file in the deployment directory, -that links to your project's source code. And, if your deployment directory is -Python's ``site-packages`` directory, it will also update the -``easy-install.pth`` file to include your project's source code, thereby making +change files that need to be compiled or the provided metadata and setuptools configuration. + +You can perform a ``pip`` installation passing the ``-e/--editable`` +flag (e.g., ``pip install -e .``). It works very similarly to +``pip install .``, except that it doesn't actually install anything. +Instead, it creates a special ``.egg-link`` file in the target directory +(usually ``site-packages``) that links to your project's source code. +It may also update an existing ``easy-install.pth`` file +to include your project's source code, thereby making it available on ``sys.path`` for all programs using that Python installation. -In addition, the ``develop`` command creates wrapper scripts in the target -script directory that will run your in-development scripts after ensuring that -all your ``install_requires`` packages are available on ``sys.path``. - -You can deploy the same project to multiple staging areas, e.g. if you have +You can deploy the same project to multiple staging areas, e.g., if you have multiple projects on the same machine that are sharing the same project you're doing development work. -When you're done with a given development task, you can remove the project -source from a staging area using ``setup.py develop --uninstall``, specifying -the desired staging area if it's not the default. - -There are several options to control the precise behavior of the ``develop`` -command; see the section on the :ref:`develop ` command below for more details. - -Note that you can also apply setuptools commands to non-setuptools projects, -using commands like this:: - - python -c "import setuptools; with open('setup.py') as f: exec(compile(f.read(), 'setup.py', 'exec'))" develop - -That is, you can simply list the normal setup commands and options following -the quoted part. +When you're done with a given development task, you can simply uninstall your +package (as you would normally do with ``pip uninstall ``). diff --git a/docs/userguide/entry_point.rst b/docs/userguide/entry_point.rst index 9dca389..b7dd7aa 100644 --- a/docs/userguide/entry_point.rst +++ b/docs/userguide/entry_point.rst @@ -4,15 +4,27 @@ Entry Points ============ -Packages may provide commands to be run at the console (console scripts), -such as the ``pip`` command. These commands are defined for a package -as a specific kind of entry point in the ``setup.cfg`` or -``setup.py``. +Entry points are a type of metadata that can be exposed by packages on installation. +They are a very useful feature of the Python ecosystem, +and come specially handy in two scenarios: + +1. 
The package would like to provide commands to be run at the terminal. +This functionality is known as *console* scripts. The command may also +open up a GUI, in which case it is known as a *GUI* script. An example +of a console script is the one provided by the :pypi:`pip` package, which +allows you to run commands like ``pip install`` in the terminal. + +2. A package would like to enable customization of its functionalities +via *plugins*. For example, the test framework :pypi:`pytest` allows +customization via the ``pytest11`` entry point, and the syntax +highlighting tool :pypi:`pygments` allows specifying additional styles +using the entry point ``pygments.styles``. Console Scripts =============== +Let us start with console scripts. First consider an example without entry points. Imagine a package defined thus:: @@ -59,23 +71,23 @@ configuration: .. tab:: setup.cfg - .. code-block:: ini + .. code-block:: ini - [options.entry_points] - console_scripts = - hello-world = timmins:hello_world + [options.entry_points] + console_scripts = + hello-world = timmins:hello_world .. tab:: setup.py .. code-block:: python - + from setuptools import setup setup( # ..., entry_points={ 'console_scripts': [ - 'hello-world=timmins:hello_world', + 'hello-world = timmins:hello_world', ] } ) @@ -99,18 +111,15 @@ After installing the package, a user may invoke that function by simply calling Note that any function configured as a console script, i.e. ``hello_world()`` in this example, should not accept any arguments. If your function requires any input from the user, you can use regular command-line argument parsing utilities like -`argparse `_ within the body of +:mod:`argparse` within the body of the function to parse user input given via :obj:`sys.argv`. -The syntax for entry points is specified as follows: - -.. code-block:: ini - - = [.[.]][:.] - -where ``name`` is the name for the script you want to create, the left hand -side of ``:`` is the module that contains your function and the right hand -side is the object you want to invoke (e.g. a function). +You may have noticed that we have used a special syntax to specify the function +that must be invoked by the console script, i.e. we have written ``timmins:hello_world`` +with a colon ``:`` separating the package name and the function name. The full +specification of this syntax is discussed in the `last section <#entry-points-syntax>`_ +of this document, and this can be used to specify a function located anywhere in +your package, not just in ``__init__.py``. GUI Scripts =========== @@ -141,14 +150,14 @@ Then, we can add a GUI script entry point: .. tab:: setup.py .. code-block:: python - + from setuptools import setup setup( # ..., entry_points={ 'gui_scripts': [ - 'hello-world=timmins:hello_world', + 'hello-world = timmins:hello_world', ] } ) @@ -174,7 +183,8 @@ will open a small application window with the title 'Hello world'. Note that just as with console scripts, any function configured as a GUI script should not accept any arguments, and any user input can be parsed within the -body of the function. +body of the function. GUI scripts also use the same syntax (discussed in the +`last section <#entry-points-syntax>`_) for specifying the function to be invoked. .. note:: @@ -279,12 +289,12 @@ as follows: ``group``, and a ``value``. For example, after setting up the plugin package as described below, ``display_eps`` in the above code will look like this: [#package_metadata]_ - .. code-block:: python + .. 
code-block:: python - ( - EntryPoint(name='excl', value='timmins_plugin_fancy:excl_display', group='timmins.display'), - ..., - ) + ( + EntryPoint(name='excl', value='timmins_plugin_fancy:excl_display', group='timmins.display'), + ..., + ) ``display_eps`` will now be a list of ``EntryPoint`` objects, each referring to ``display()``-like functions defined by one or more installed plugin packages. Then, to import a specific @@ -341,7 +351,7 @@ of ``timmins-plugin-fancy``: [options.entry_points] timmins.display = - excl = timmins_plugin_fancy:excl_display + excl = timmins_plugin_fancy:excl_display .. tab:: setup.py @@ -352,8 +362,8 @@ of ``timmins-plugin-fancy``: setup( # ..., entry_points = { - 'timmins.display' = [ - 'excl=timmins_plugin_fancy:excl_display' + 'timmins.display': [ + 'excl = timmins_plugin_fancy:excl_display' ] } ) @@ -362,14 +372,15 @@ of ``timmins-plugin-fancy``: .. code-block:: toml + # Note the quotes around timmins.display in order to escape the dot . [project.entry-points."timmins.display"] excl = "timmins_plugin_fancy:excl_display" Basically, this configuration states that we are a supplying an entry point under the group ``timmins.display``. The entry point is named ``excl`` and it -refers to the function ``excl_display`` defined by the package ``timmins_plugin_fancy``. +refers to the function ``excl_display`` defined by the package ``timmins-plugin-fancy``. -Now, if we install both ``timmins`` and ``timmins_plugin_fancy``, we should get +Now, if we install both ``timmins`` and ``timmins-plugin-fancy``, we should get the following: .. code-block:: pycon @@ -378,7 +389,7 @@ the following: >>> hello_world() !!! Hello world !!! -whereas if we only install ``timmins`` and not ``timmins_plugin_fancy``, we should +whereas if we only install ``timmins`` and not ``timmins-plugin-fancy``, we should get the following: .. code-block:: pycon @@ -411,8 +422,8 @@ The configuration of ``timmins-plugin-fancy`` would then change to: [options.entry_points] timmins.display = - excl = timmins_plugin_fancy:excl_display - lined = timmins_plugin_fancy:lined_display + excl = timmins_plugin_fancy:excl_display + lined = timmins_plugin_fancy:lined_display .. tab:: setup.py @@ -423,9 +434,9 @@ The configuration of ``timmins-plugin-fancy`` would then change to: setup( # ..., entry_points = { - 'timmins.display' = [ - 'excl=timmins_plugin_fancy:excl_display', - 'lined=timmins_plugin_fancy:lined_display', + 'timmins.display': [ + 'excl = timmins_plugin_fancy:excl_display', + 'lined = timmins_plugin_fancy:lined_display', ] } ) @@ -462,23 +473,26 @@ is useful: # do something with display ... -importlib.metadata ------------------- +Another point is that in this particular example, we have used plugins to +customize the behaviour of a function (``display()``). In general, we can use entry +points to enable plugins to not only customize the behaviour of functions, but also +of entire classes and modules. This is unlike the case of console/GUI scripts, +where entry points can only refer to functions. The syntax used for specifying the +entry points remains the same as for console/GUI scripts, and is discussed in the +`last section <#entry-points-syntax>`_. + +.. tip:: + The recommended approach for loading and importing entry points is the + :mod:`importlib.metadata` module, + which is a part of the standard library since Python 3.8. For older versions of + Python, its backport :pypi:`importlib_metadata` should be used. 
While using the + backport, the only change that has to be made is to replace ``importlib.metadata`` + with ``importlib_metadata``, i.e. -The recommended approach for loading and importing entry points is the -:mod:`importlib.metadata` module, -which is a part of the standard library since Python 3.8. For older versions of -Python, its backport :pypi:`importlib_metadata` should be used. While using the -backport, the only change that has to be made is to replace ``importlib.metadata`` -with ``importlib_metadata``, i.e. - -.. code-block:: python - - from importlib_metadata import entry_points - ... + .. code-block:: python -Summary -------- + from importlib_metadata import entry_points + ... In summary, entry points allow a package to open its functionalities for customization via plugins. @@ -487,28 +501,64 @@ or prior knowledge about the plugins implementing the entry points, and downstream users are able to compose functionality by pulling together plugins implementing the entry points. +Entry Points Syntax +=================== + +The syntax for entry points is specified as follows:: -Dependency Management -===================== + = [:[.[.]*]] -Some entry points may require additional dependencies to properly function. -For such an entry point, declare in square brackets any number of dependency -``extras`` following the entry point definition. Such entry points will only -be viable if their extras were declared and installed. See the -:doc:`guide on dependencies management ` for -more information on defining extra requirements. Consider from the -above example: +Here, the square brackets ``[]`` denote optionality and the asterisk ``*`` +denotes repetition. +``name`` is the name of the script/entry point you want to create, the left hand +side of ``:`` is the package or module that contains the object you want to invoke +(think about it as something you would write in an import statement), and the right +hand side is the object you want to invoke (e.g. a function). -.. code-block:: ini +To make this syntax more clear, consider the following examples: + +Package or module + If you supply:: + + = + + as the entry point, where ```` can contain ``.`` in the case + of sub-modules or sub-packages, then, tools in the Python ecosystem will roughly + interpret this value as: + + .. code-block:: python + + import + parsed_value = + +Module-level object + If you supply:: + + = : + + where ```` does not contain any ``.``, this will be roughly interpreted + as: + + .. code-block:: python + + from import + parsed_value = + +Nested object + If you supply:: + + = :.. + + this will be roughly interpreted as: + + .. code-block:: python - [options.entry_points] - console_scripts = - hello-world = timmins:hello_world [pretty-printer] + from import + parsed_value = .. -In this case, the ``hello-world`` script is only viable if the ``pretty-printer`` -extra is indicated, and so a plugin host might exclude that entry point -(i.e. not install a console script) if the relevant extra dependencies are not -installed. +In the case of console/GUI scripts, this syntax can be used to specify a function, while +in the general case of entry points as used for plugins, it can be used to specify a function, +class or module. ---- diff --git a/docs/userguide/extension.rst b/docs/userguide/extension.rst index 21fb05b..f1dce94 100644 --- a/docs/userguide/extension.rst +++ b/docs/userguide/extension.rst @@ -1,60 +1,83 @@ .. 
_Creating ``distutils`` Extensions: -Creating ``distutils`` Extensions -================================= +Extending or Customizing Setuptools +=================================== -It can be hard to add new commands or setup arguments to the distutils. But -the ``setuptools`` package makes it a bit easier, by allowing you to distribute -a distutils extension as a separate project, and then have projects that need -the extension just refer to it in their ``setup_requires`` argument. +Setuptools design is based on the distutils_ package originally distributed +as part of Python's standard library, effectively serving as its successor +(as established in :pep:`632`). -With ``setuptools``, your distutils extension projects can hook in new +This means that ``setuptools`` strives to honor the extension mechanisms +provided by ``distutils``, and allows developers to create third party packages +that modify or augment the build process behavior. + +A simple way of doing that is to hook in new or existing commands and ``setup()`` arguments just by defining "entry points". These are mappings from command or argument names to a specification of where to import a handler from. (See the section on :ref:`Dynamic Discovery of -Services and Plugins` above for some more background on entry points.) - - -Adding Commands ---------------- - -You can add new ``setup`` commands by defining entry points in the -``distutils.commands`` group. For example, if you wanted to add a ``foo`` -command, you might add something like this to your distutils extension -project's setup script:: - - setup( - # ... - entry_points={ - "distutils.commands": [ - "foo = mypackage.some_module:foo", - ], - }, - ) +Services and Plugins` for some more background on entry points). + +The following sections describe the most common procedures for extending +the ``distutils`` functionality used by ``setuptools``. + +.. important:: + Any entry-point defined in your ``setup.cfg``, ``setup.py`` or + ``pyproject.toml`` files are not immediately available for use. Your + package needs to be installed first, then ``setuptools`` will be able to + access these entry points. For example consider a ``Project-A`` that + defines entry points. When building ``Project-A``, these will not be + available. If ``Project-B`` declares a :doc:`build system requirement + ` on ``Project-A``, then ``setuptools`` + will be able to use ``Project-A``' customizations. + +Customizing Commands +-------------------- + +Both ``setuptools`` and ``distutils`` are structured around the *command design +pattern*. This means that each main action executed when building a +distribution package (such as creating a :term:`sdist ` +or :term:`wheel`) correspond to the implementation of a Python class. + +Originally in ``distutils``, these commands would correspond to actual CLI +arguments that could be passed to the ``setup.py`` script to trigger a +different aspect of the build. In ``setuptools``, however, these command +objects are just a design abstraction that encapsulate logic and help to +organise the code. + +You can overwrite exiting commands (or add new ones) by defining entry +points in the ``distutils.commands`` group. For example, if you wanted to add +a ``foo`` command, you might add something like this to your project: + +.. code-block:: ini + + # setup.cfg + ... + [options.entry_points] + distutils.commands = + foo = mypackage.some_module:foo (Assuming, of course, that the ``foo`` class in ``mypackage.some_module`` is a ``setuptools.Command`` subclass.) 
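As an illustration, a minimal sketch of what such a ``foo`` class could look like, assuming the ``mypackage.some_module`` layout from the example (this is not code from the setuptools code base):

.. code-block:: python

    # mypackage/some_module.py
    from setuptools import Command


    class foo(Command):
        """A do-nothing command used to illustrate the entry-point hook."""

        description = "example command provided by a plugin"
        user_options = []  # no command-line options in this sketch

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            print("running the foo command")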
Once a project containing such entry points has been activated on ``sys.path``, -(e.g. by running "install" or "develop" with a site-packages installation -directory) the command(s) will be available to any ``setuptools``-based setup -scripts. It is not necessary to use the ``--command-packages`` option or -to monkeypatch the ``distutils.command`` package to install your commands; -``setuptools`` automatically adds a wrapper to the distutils to search for -entry points in the active distributions on ``sys.path``. In fact, this is +(e.g. by running ``pip install``) the command(s) will be available to any +``setuptools``-based project. In fact, this is how setuptools' own commands are installed: the setuptools project's setup script defines entry points for them! -.. note:: - When creating commands, and specially when defining custom ways of building - compiled extensions (for example via ``build_ext``), consider - handling exceptions such as ``CompileError``, ``LinkError``, ``LibError``, - among others. These exceptions are available in the ``setuptools.errors`` - module. +The commands ``sdist``, ``build_py`` and ``build_ext`` are especially useful +to customize ``setuptools`` builds. Note however that when overwriting existing +commands, you should be very careful to maintain API compatibility. +Custom commands should try to replicate the same overall behavior as the +original classes, and when possible, even inherit from them. +You should also consider handling exceptions such as ``CompileError``, +``LinkError``, ``LibError``, among others. These exceptions are available in +the ``setuptools.errors`` module. -Adding ``setup()`` Arguments ----------------------------- + +Adding Arguments +---------------- .. warning:: Adding arguments to setup is discouraged as such arguments are only supported through imperative execution and not supported through @@ -64,19 +87,17 @@ Sometimes, your commands may need additional arguments to the ``setup()`` call. You can enable this by defining entry points in the ``distutils.setup_keywords`` group. For example, if you wanted a ``setup()`` argument called ``bar_baz``, you might add something like this to your -distutils extension project's setup script:: - - setup( - # ... - entry_points={ - "distutils.commands": [ - "foo = mypackage.some_module:foo", - ], - "distutils.setup_keywords": [ - "bar_baz = mypackage.some_module:validate_bar_baz", - ], - }, - ) +extension project: + +.. code-block:: ini + + # setup.cfg + ... + [options.entry_points] + distutils.commands = + foo = mypackage.some_module:foo + distutils.setup_keywords = + bar_baz = mypackage.some_module:validate_bar_baz The idea here is that the entry point defines a function that will be called to validate the ``setup()`` argument, if it's supplied. The ``Distribution`` @@ -93,7 +114,7 @@ a non-None value. Here's an example validation function:: Your function should accept three arguments: the ``Distribution`` object, the attribute name, and the attribute value. It should raise a -``DistutilsSetupError`` (from the ``distutils.errors`` module) if the argument +``SetupError`` (from the ``setuptools.errors`` module) if the argument is invalid. Remember, your function will only be called with non-None values, and the default value of arguments defined this way is always None. So, your commands should always be prepared for the possibility that the attribute will @@ -101,15 +122,9 @@ be ``None`` when they access it later. 
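For illustration, a sketch of what the ``bar_baz`` validator from the example above might look like; the specific check is arbitrary and only meant to show the expected signature and error handling:

.. code-block:: python

    # mypackage/some_module.py
    from setuptools.errors import SetupError


    def validate_bar_baz(dist, attr, value):
        """Validate the ``bar_baz`` setup() keyword: require a non-empty string."""
        if not isinstance(value, str) or not value:
            raise SetupError(f"{attr!r} must be a non-empty string, got {value!r}")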
If more than one active distribution defines an entry point for the same ``setup()`` argument, *all* of them will be called. This allows multiple -distutils extensions to define a common argument, as long as they agree on +extensions to define a common argument, as long as they agree on what values of that argument are valid. -Also note that as with commands, it is not necessary to subclass or monkeypatch -the distutils ``Distribution`` class in order to add your arguments; it is -sufficient to define the entry points in your extension, as long as any setup -script using your extension lists your project in its ``setup_requires`` -argument. - Customizing Distribution Options -------------------------------- @@ -130,36 +145,48 @@ plugin is encouraged to load the configuration/settings for their behavior independently. +Defining Additional Metadata +---------------------------- + +Some extensible applications and frameworks may need to define their own kinds +of metadata, which they can then access using the :mod:`importlib.metadata` APIs. +Ordinarily, this is done by having plugin +developers include additional files in their ``ProjectName.egg-info`` +directory. However, since it can be tedious to create such files by hand, you +may want to create an extension that will create the necessary files +from arguments to ``setup()``, in much the same way that ``setuptools`` does +for many of the ``setup()`` arguments it adds. See the section below for more +details. + + .. _Adding new EGG-INFO Files: Adding new EGG-INFO Files -------------------------- +~~~~~~~~~~~~~~~~~~~~~~~~~ Some extensible applications or frameworks may want to allow third parties to develop plugins with application or framework-specific metadata included in the plugins' EGG-INFO directory, for easy access via the ``pkg_resources`` -metadata API. The easiest way to allow this is to create a distutils extension +metadata API. The easiest way to allow this is to create an extension to be used from the plugin projects' setup scripts (via ``setup_requires``) that defines a new setup keyword, and then uses that data to write an EGG-INFO file when the ``egg_info`` command is run. The ``egg_info`` command looks for extension points in an ``egg_info.writers`` -group, and calls them to write the files. Here's a simple example of a -distutils extension defining a setup argument ``foo_bar``, which is a list of +group, and calls them to write the files. Here's a simple example of an +extension defining a setup argument ``foo_bar``, which is a list of lines that will be written to ``foo_bar.txt`` in the EGG-INFO directory of any -project that uses the argument:: - - setup( - # ... - entry_points={ - "distutils.setup_keywords": [ - "foo_bar = setuptools.dist:assert_string_list", - ], - "egg_info.writers": [ - "foo_bar.txt = setuptools.command.egg_info:write_arg", - ], - }, - ) +project that uses the argument: + +.. code-block:: ini + + # setup.cfg + ... + [options.entry_points] + distutils.setup_keywords = + foo_bar = setuptools.dist:assert_string_list + egg_info.writers = + foo_bar.txt = setuptools.command.egg_info:write_arg This simple example makes use of two utility functions defined by setuptools for its own use: a routine to validate that a setup keyword is a sequence of @@ -179,11 +206,11 @@ write (e.g. ``foo_bar.txt``), and the actual full filename that should be written to. 
In general, writer functions should honor the command object's ``dry_run`` -setting when writing files, and use the ``distutils.log`` object to do any -console output. The easiest way to conform to this requirement is to use +setting when writing files, and use ``logging`` to do any console output. +The easiest way to conform to this requirement is to use the ``cmd`` object's ``write_file()``, ``delete_file()``, and -``write_or_delete_file()`` methods exclusively for your file operations. See -those methods' docstrings for more details. +``write_or_delete_file()`` methods exclusively for your file operations. +See those methods' docstrings for more details. .. _Adding Support for Revision Control Systems: @@ -212,13 +239,16 @@ called "foobar", you would write a function something like this: def find_files_for_foobar(dirname): ... # loop to yield paths that start with `dirname` -And you would register it in a setup script using something like this:: +And you would register it in a setup script using something like this: + +.. code-block:: ini + + # setup.cfg + ... - entry_points={ - "setuptools.file_finders": [ - "foobar = my_foobar_module:find_files_for_foobar", - ] - } + [options.entry_points] + setuptools.file_finders = + foobar = my_foobar_module:find_files_for_foobar Then, anyone who wants to use your plugin can simply install it, and their local setuptools installation will be able to find the necessary files. @@ -248,3 +278,18 @@ A few important points for writing revision control file finders: with the absence of needed programs (i.e., ones belonging to the revision control system itself. It *may*, however, use ``distutils.log.warn()`` to inform the user of the missing program(s). + + +.. _distutils: https://docs.python.org/3.9/library/distutils.html + + +Final Remarks +------------- + +* To use a ``setuptools`` plugin, your users will need to add your package as a + build requirement to their build-system configuration. Please check out our + guides on :doc:`/userguide/dependency_management` for more information. + +* Directly calling ``python setup.py ...`` is considered a **deprecated** practice. + You should not add new commands to ``setuptools`` expecting them to be run + via this interface. diff --git a/docs/userguide/functionalities_rewrite.rst b/docs/userguide/functionalities_rewrite.rst deleted file mode 100644 index d0997ca..0000000 --- a/docs/userguide/functionalities_rewrite.rst +++ /dev/null @@ -1,9 +0,0 @@ -======================================================== -Using setuptools to package and distribute your project -======================================================== - -``setuptools`` offers a variety of functionalities that make it easy to -build and distribute your python package. Here we provide an overview on -the commonly used ones. - - diff --git a/docs/userguide/index.rst b/docs/userguide/index.rst index 74e9b1e..d5d150a 100644 --- a/docs/userguide/index.rst +++ b/docs/userguide/index.rst @@ -2,22 +2,24 @@ Building and Distributing Packages with Setuptools ================================================== -``Setuptools`` is a collection of enhancements to the Python ``distutils`` -that allow developers to more easily build and -distribute Python packages, especially ones that have dependencies on other -packages. +The first step towards sharing a Python library or program is to build a +distribution package [#package-overload]_. 
This includes adding a set of +additional files containing metadata and configuration to not only instruct +``setuptools`` on how the distribution should be built but also +to help installers (such as :pypi:`pip`) during the installation process. -Packages built and distributed using ``setuptools`` look to the user like -ordinary Python packages based on the ``distutils``. +This document contains information to help Python developers through this +process. Please check the :doc:`/userguide/quickstart` for an overview of +the workflow. -Transition to PEP517 -==================== +Also note that ``setuptools`` is what is known in the community as a :pep:`build +backend <517#terminology-and-goals>`; user-facing interfaces are provided by tools +such as :pypi:`pip` and :pypi:`build`. To use ``setuptools``, one must +explicitly create a ``pyproject.toml`` file as described in :doc:`/build_meta`. -Since setuptools no longer serves as the default build tool, one must explicitly -opt in (by providing a :file:`pyproject.toml` file) to use this library. The user -facing part is provided by tools such as pip and -backend interface is described :doc:`in this document <../build_meta>`. The -quickstart provides an overview of the new workflow. + +Contents +======== .. toctree:: :maxdepth: 1 @@ -33,5 +35,14 @@ quickstart provides an overview of the new workflow. declarative_config pyproject_config commands - functionalities_rewrite miscellaneous + +--- + +.. rubric:: Notes + +.. [#package-overload] + A :term:`Distribution Package` is also referred to in the Python community simply as "package". + Unfortunately, this jargon might be a bit confusing for new users because the term package + can also refer to any :term:`directory <package>` (or sub directory) used to organize + :term:`modules <module>` and auxiliary files. diff --git a/docs/userguide/miscellaneous.rst b/docs/userguide/miscellaneous.rst index 5fd2f0a..776f12f 100644 --- a/docs/userguide/miscellaneous.rst +++ b/docs/userguide/miscellaneous.rst @@ -1,105 +1,7 @@ -.. _Automatic Resource Extraction: - -Automatic Resource Extraction ------------------------------ - -If you are using tools that expect your resources to be "real" files, or your -project includes non-extension native libraries or other files that your C -extensions expect to be able to access, you may need to list those files in -the ``eager_resources`` argument to ``setup()``, so that the files will be -extracted together, whenever a C extension in the project is imported. - -This is especially important if your project includes shared libraries *other* -than distutils-built C extensions, and those shared libraries use file -extensions other than ``.dll``, ``.so``, or ``.dylib``, which are the -extensions that setuptools 0.6a8 and higher automatically detects as shared -libraries and adds to the ``native_libs.txt`` file for you. Any shared -libraries whose names do not end with one of those extensions should be listed -as ``eager_resources``, because they need to be present in the filesystem when -the C extensions that link to them are used. - -The ``pkg_resources`` runtime for compressed packages will automatically -extract *all* C extensions and ``eager_resources`` at the same time, whenever -*any* C extension or eager resource is requested via the ``resource_filename()`` -API. (C extensions are imported using ``resource_filename()`` internally.) -This ensures that C extensions will see all of the "real" files that they -expect to see.
- -Note also that you can list directory resource names in ``eager_resources`` as -well, in which case the directory's contents (including subdirectories) will be -extracted whenever any C extension or eager resource is requested. - -Please note that if you're not sure whether you need to use this argument, you -don't! It's really intended to support projects with lots of non-Python -dependencies and as a last resort for crufty projects that can't otherwise -handle being compressed. If your package is pure Python, Python plus data -files, or Python plus C, you really don't need this. You've got to be using -either C or an external program that needs "real" files in your project before -there's any possibility of ``eager_resources`` being relevant to your project. - -Defining Additional Metadata ----------------------------- - -Some extensible applications and frameworks may need to define their own kinds -of metadata to include in eggs, which they can then access using the -``pkg_resources`` metadata APIs. Ordinarily, this is done by having plugin -developers include additional files in their ``ProjectName.egg-info`` -directory. However, since it can be tedious to create such files by hand, you -may want to create a distutils extension that will create the necessary files -from arguments to ``setup()``, in much the same way that ``setuptools`` does -for many of the ``setup()`` arguments it adds. See the section below on -:ref:`Creating ``distutils\`\` Extensions` for more details, especially the -subsection on :ref:`Adding new EGG-INFO Files`. - -Setting the ``zip_safe`` flag ------------------------------ - -For some use cases (such as bundling as part of a larger application), Python -packages may be run directly from a zip file. -Not all packages, however, are capable of running in compressed form, because -they may expect to be able to access either source code or data files as -normal operating system files. So, ``setuptools`` can install your project -as a zipfile or a directory, and its default choice is determined by the -project's ``zip_safe`` flag. - -You can pass a True or False value for the ``zip_safe`` argument to the -``setup()`` function, or you can omit it. If you omit it, the ``bdist_egg`` -command will analyze your project's contents to see if it can detect any -conditions that would prevent it from working in a zipfile. It will output -notices to the console about any such conditions that it finds. - -Currently, this analysis is extremely conservative: it will consider the -project unsafe if it contains any C extensions or datafiles whatsoever. This -does *not* mean that the project can't or won't work as a zipfile! It just -means that the ``bdist_egg`` authors aren't yet comfortable asserting that -the project *will* work. If the project contains no C or data files, and does -no ``__file__`` or ``__path__`` introspection or source code manipulation, then -there is an extremely solid chance the project will work when installed as a -zipfile. (And if the project uses ``pkg_resources`` for all its data file -access, then C extensions and other data files shouldn't be a problem at all. -See the :ref:`Accessing Data Files at Runtime` section above for more information.) - -However, if ``bdist_egg`` can't be *sure* that your package will work, but -you've checked over all the warnings it issued, and you are either satisfied it -*will* work (or if you want to try it for yourself), then you should set -``zip_safe`` to ``True`` in your ``setup()`` call. 
If it turns out that it -doesn't work, you can always change it to ``False``, which will force -``setuptools`` to install your project as a directory rather than as a zipfile. - -In the future, as we gain more experience with different packages and become -more satisfied with the robustness of the ``pkg_resources`` runtime, the -"zip safety" analysis may become less conservative. However, we strongly -recommend that you determine for yourself whether your project functions -correctly when installed as a zipfile, correct any problems if you can, and -then make an explicit declaration of ``True`` or ``False`` for the ``zip_safe`` -flag, so that it will not be necessary for ``bdist_egg`` to try to guess -whether your project can work as a zipfile. - - .. _Controlling files in the distribution: Controlling files in the distribution -------------------------------------- +===================================== For the most common use cases, ``setuptools`` will automatically find out which files are necessary for distributing the package. diff --git a/docs/userguide/quickstart.rst b/docs/userguide/quickstart.rst index c4757b9..0b75947 100644 --- a/docs/userguide/quickstart.rst +++ b/docs/userguide/quickstart.rst @@ -1,6 +1,6 @@ -========================== -``setuptools`` Quickstart -========================== +========== +Quickstart +========== Installation ============ @@ -286,10 +286,8 @@ For more advanced use, see :doc:`dependency_management`. Including Data Files ==================== -The distutils have traditionally allowed installation of "data files", which -are placed in a platform-specific location. Setuptools offers three ways to -specify data files to be included in your packages. For the simplest use, you -can simply use the ``include_package_data`` keyword: +Setuptools offers three ways to specify data files to be included in your packages. +For the simplest use, you can simply use the ``include_package_data`` keyword: .. tab:: setup.cfg @@ -319,7 +317,7 @@ can simply use the ``include_package_data`` keyword: # You can deactivate that with `include-package-data = false` This tells setuptools to install any data files it finds in your packages. -The data files must be specified via the distutils' |MANIFEST.in|_ file +The data files must be specified via the |MANIFEST.in|_ file or automatically added by a :ref:`Revision Control System plugin `. For more details, see :doc:`datafiles`. 
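Once installed this way, the files live inside your package and can be read back at run time with the standard library, for example (``mypkg`` and ``data/msg.txt`` are placeholder names):

.. code-block:: python

    # Hypothetical runtime access to a packaged data file
    # (``importlib.resources.files`` requires Python 3.9+).
    from importlib.resources import files

    message = (files("mypkg") / "data" / "msg.txt").read_text(encoding="utf-8")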
diff --git a/setup.cfg b/setup.cfg index e1037d6..dd7b5a4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = setuptools -version = 62.3.4 +version = 62.4.0 author = Python Packaging Authority author_email = distutils-sig@python.org description = Easily download, build, install, upgrade, and uninstall Python packages @@ -106,6 +106,7 @@ distutils.commands = alias = setuptools.command.alias:alias bdist_egg = setuptools.command.bdist_egg:bdist_egg bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm + build = setuptools.command.build:build build_clib = setuptools.command.build_clib:build_clib build_ext = setuptools.command.build_ext:build_ext build_py = setuptools.command.build_py:build_py diff --git a/setuptools/_distutils/__init__.py b/setuptools/_distutils/__init__.py index 8fd493b..b3ac014 100644 --- a/setuptools/_distutils/__init__.py +++ b/setuptools/_distutils/__init__.py @@ -11,7 +11,7 @@ used from a setup script as import sys import importlib -__version__ = sys.version[:sys.version.index(' ')] +__version__ = sys.version[: sys.version.index(' ')] try: diff --git a/setuptools/_distutils/_msvccompiler.py b/setuptools/_distutils/_msvccompiler.py index f2f801c..3b5a817 100644 --- a/setuptools/_distutils/_msvccompiler.py +++ b/setuptools/_distutils/_msvccompiler.py @@ -18,23 +18,30 @@ import subprocess import contextlib import warnings import unittest.mock + with contextlib.suppress(ImportError): import winreg -from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError +from distutils.errors import ( + DistutilsExecError, + DistutilsPlatformError, + CompileError, + LibError, + LinkError, +) from distutils.ccompiler import CCompiler, gen_lib_options from distutils import log from distutils.util import get_platform from itertools import count + def _find_vc2015(): try: key = winreg.OpenKeyEx( winreg.HKEY_LOCAL_MACHINE, r"Software\Microsoft\VisualStudio\SxS\VC7", - access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY + access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY, ) except OSError: log.debug("Visual C++ is not registered") @@ -57,6 +64,7 @@ def _find_vc2015(): best_version, best_dir = version, vc_dir return best_version, best_dir + def _find_vc2017(): """Returns "15, path" based on the result of invoking vswhere.exe If no install is found, returns "None, None" @@ -72,14 +80,23 @@ def _find_vc2017(): return None, None try: - path = subprocess.check_output([ - os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), - "-latest", - "-prerelease", - "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", - "-property", "installationPath", - "-products", "*", - ], encoding="mbcs", errors="strict").strip() + path = subprocess.check_output( + [ + os.path.join( + root, "Microsoft Visual Studio", "Installer", "vswhere.exe" + ), + "-latest", + "-prerelease", + "-requires", + "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + "-property", + "installationPath", + "-products", + "*", + ], + encoding="mbcs", + errors="strict", + ).strip() except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): return None, None @@ -89,13 +106,15 @@ def _find_vc2017(): return None, None + PLAT_SPEC_TO_RUNTIME = { - 'x86' : 'x86', - 'x86_amd64' : 'x64', - 'x86_arm' : 'arm', - 'x86_arm64' : 'arm64' + 'x86': 'x86', + 'x86_amd64': 'x64', + 'x86_arm': 'arm', + 'x86_arm64': 'arm64', } + def _find_vcvarsall(plat_spec): # bpo-38597: Removed vcruntime return value _, best_dir = _find_vc2017() @@ -114,12 +133,10 @@ def 
_find_vcvarsall(plat_spec): return vcvarsall, None + def _get_vc_env(plat_spec): if os.getenv("DISTUTILS_USE_SDK"): - return { - key.lower(): value - for key, value in os.environ.items() - } + return {key.lower(): value for key, value in os.environ.items()} vcvarsall, _ = _find_vcvarsall(plat_spec) if not vcvarsall: @@ -132,18 +149,17 @@ def _get_vc_env(plat_spec): ).decode('utf-16le', errors='replace') except subprocess.CalledProcessError as exc: log.error(exc.output) - raise DistutilsPlatformError("Error executing {}" - .format(exc.cmd)) + raise DistutilsPlatformError("Error executing {}".format(exc.cmd)) env = { key.lower(): value - for key, _, value in - (line.partition('=') for line in out.splitlines()) + for key, _, value in (line.partition('=') for line in out.splitlines()) if key and value } return env + def _find_exe(exe, paths=None): """Return path to an MSVC executable program. @@ -161,19 +177,21 @@ def _find_exe(exe, paths=None): return fn return exe + # A map keyed by get_platform() return values to values accepted by # 'vcvarsall.bat'. Always cross-compile from x86 to work with the # lighter-weight MSVC installs that do not include native 64-bit tools. PLAT_TO_VCVARS = { - 'win32' : 'x86', - 'win-amd64' : 'x86_amd64', - 'win-arm32' : 'x86_arm', - 'win-arm64' : 'x86_arm64' + 'win32': 'x86', + 'win-amd64': 'x86_amd64', + 'win-arm32': 'x86_arm', + 'win-arm64': 'x86_arm64', } -class MSVCCompiler(CCompiler) : + +class MSVCCompiler(CCompiler): """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" + as defined by the CCompiler abstract class.""" compiler_type = 'msvc' @@ -192,8 +210,7 @@ class MSVCCompiler(CCompiler) : # Needed for the filename generation methods provided by the # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) + src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions res_extension = '.res' obj_extension = '.obj' static_lib_extension = '.lib' @@ -201,7 +218,6 @@ class MSVCCompiler(CCompiler) : static_lib_format = shared_lib_format = '%s%s' exe_extension = '.exe' - def __init__(self, verbose=0, dry_run=0, force=0): super().__init__(verbose, dry_run, force) # target platform (.plat_name is consistent with 'bdist') @@ -215,25 +231,27 @@ class MSVCCompiler(CCompiler) : plat_name = get_platform() # sanity check for platforms to prevent obscure errors later. if plat_name not in PLAT_TO_VCVARS: - raise DistutilsPlatformError("--plat-name must be one of {}" - .format(tuple(PLAT_TO_VCVARS))) + raise DistutilsPlatformError( + "--plat-name must be one of {}".format(tuple(PLAT_TO_VCVARS)) + ) # Get the vcvarsall.bat spec for the requested platform. plat_spec = PLAT_TO_VCVARS[plat_name] vc_env = _get_vc_env(plat_spec) if not vc_env: - raise DistutilsPlatformError("Unable to find a compatible " - "Visual Studio installation.") + raise DistutilsPlatformError( + "Unable to find a compatible " "Visual Studio installation." 
+ ) self._paths = vc_env.get('path', '') paths = self._paths.split(os.pathsep) self.cc = _find_exe("cl.exe", paths) self.linker = _find_exe("link.exe", paths) self.lib = _find_exe("lib.exe", paths) - self.rc = _find_exe("rc.exe", paths) # resource compiler - self.mc = _find_exe("mc.exe", paths) # message compiler - self.mt = _find_exe("mt.exe", paths) # message compiler + self.rc = _find_exe("rc.exe", paths) # resource compiler + self.mc = _find_exe("mc.exe", paths) # message compiler + self.mt = _find_exe("mt.exe", paths) # message compiler for dir in vc_env.get('include', '').split(os.pathsep): if dir: @@ -247,26 +265,35 @@ class MSVCCompiler(CCompiler) : # bpo-38597: Always compile with dynamic linking # Future releases of Python 3.x will include all past # versions of vcruntime*.dll for compatibility. - self.compile_options = [ - '/nologo', '/O2', '/W3', '/GL', '/DNDEBUG', '/MD' - ] + self.compile_options = ['/nologo', '/O2', '/W3', '/GL', '/DNDEBUG', '/MD'] self.compile_options_debug = [ - '/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG' + '/nologo', + '/Od', + '/MDd', + '/Zi', + '/W3', + '/D_DEBUG', ] - ldflags = [ - '/nologo', '/INCREMENTAL:NO', '/LTCG' - ] + ldflags = ['/nologo', '/INCREMENTAL:NO', '/LTCG'] - ldflags_debug = [ - '/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL' - ] + ldflags_debug = ['/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL'] self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1'] self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1'] - self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] - self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] + self.ldflags_shared = [ + *ldflags, + '/DLL', + '/MANIFEST:EMBED,ID=2', + '/MANIFESTUAC:NO', + ] + self.ldflags_shared_debug = [ + *ldflags_debug, + '/DLL', + '/MANIFEST:EMBED,ID=2', + '/MANIFESTUAC:NO', + ] self.ldflags_static = [*ldflags] self.ldflags_static_debug = [*ldflags_debug] @@ -286,13 +313,13 @@ class MSVCCompiler(CCompiler) : # -- Worker methods ------------------------------------------------ - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): + def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): ext_map = { **{ext: self.obj_extension for ext in self.src_extensions}, - **{ext: self.res_extension for ext in self._rc_extensions + self._mc_extensions}, + **{ + ext: self.res_extension + for ext in self._rc_extensions + self._mc_extensions + }, } output_dir = output_dir or '' @@ -318,15 +345,23 @@ class MSVCCompiler(CCompiler) : return list(map(make_out_path, source_filenames)) - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): + def compile( + self, + sources, + output_dir=None, + macros=None, + include_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + depends=None, + ): if not self.initialized: self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) + compile_info = self._setup_compile( + output_dir, macros, include_dirs, sources, depends, extra_postargs + ) macros, objects, extra_postargs, pp_opts, build = compile_info compile_opts = extra_preargs or [] @@ -336,7 +371,6 @@ class MSVCCompiler(CCompiler) : else: compile_opts.extend(self.compile_options) - add_cpp_opts = False for obj in objects: @@ -381,7 +415,7 @@ class MSVCCompiler(CCompiler) : try: # first compile .MC to .RC and .H 
file self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src]) - base, _ = os.path.splitext(os.path.basename (src)) + base, _ = os.path.splitext(os.path.basename(src)) rc_file = os.path.join(rc_dir, base + '.rc') # then compile .RC to .RES file self.spawn([self.rc, "/fo" + obj, rc_file]) @@ -391,8 +425,9 @@ class MSVCCompiler(CCompiler) : continue else: # how to handle this file? - raise CompileError("Don't know how to compile {} to {}" - .format(src, obj)) + raise CompileError( + "Don't know how to compile {} to {}".format(src, obj) + ) args = [self.cc] + compile_opts + pp_opts if add_cpp_opts: @@ -408,24 +443,19 @@ class MSVCCompiler(CCompiler) : return objects - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): + def create_static_lib( + self, objects, output_libname, output_dir=None, debug=0, target_lang=None + ): if not self.initialized: self.initialize() objects, output_dir = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) + output_filename = self.library_filename(output_libname, output_dir=output_dir) if self._need_link(objects, output_filename): lib_args = objects + ['/OUT:' + output_filename] if debug: - pass # XXX what goes here? + pass # XXX what goes here? try: log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args)) self.spawn([self.lib] + lib_args) @@ -434,36 +464,36 @@ class MSVCCompiler(CCompiler) : else: log.debug("skipping %s (up-to-date)", output_filename) - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): + def link( + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): if not self.initialized: self.initialize() objects, output_dir = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) + fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) libraries, library_dirs, runtime_library_dirs = fixed_args if runtime_library_dirs: - self.warn("I don't know what to do with 'runtime_library_dirs': " - + str(runtime_library_dirs)) + self.warn( + "I don't know what to do with 'runtime_library_dirs': " + + str(runtime_library_dirs) + ) - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) + lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) if output_dir is not None: output_filename = os.path.join(output_dir, output_filename) @@ -472,8 +502,9 @@ class MSVCCompiler(CCompiler) : export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])] - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) + ld_args = ( + ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename] + ) # The MSVC linker generates .lib and .exp files, which cannot be # suppressed by any linker switches. 
The .lib files may even be @@ -483,11 +514,10 @@ class MSVCCompiler(CCompiler) : build_temp = os.path.dirname(objects[0]) if export_symbols is not None: (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - build_temp, - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) + os.path.basename(output_filename) + ) + implib_file = os.path.join(build_temp, self.library_filename(dll_name)) + ld_args.append('/IMPLIB:' + implib_file) if extra_preargs: ld_args[:0] = extra_preargs @@ -525,8 +555,7 @@ class MSVCCompiler(CCompiler) : raise else: return - warnings.warn( - "Fallback spawn triggered. Please update distutils monkeypatch.") + warnings.warn("Fallback spawn triggered. Please update distutils monkeypatch.") with unittest.mock.patch.dict('os.environ', env): bag.value = super().spawn(cmd) @@ -539,7 +568,8 @@ class MSVCCompiler(CCompiler) : def runtime_library_dir_option(self, dir): raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC") + "don't know how to set runtime library search path for MSVC" + ) def library_option(self, lib): return self.library_filename(lib) diff --git a/setuptools/_distutils/archive_util.py b/setuptools/_distutils/archive_util.py index 565a311..5a70c32 100644 --- a/setuptools/_distutils/archive_util.py +++ b/setuptools/_distutils/archive_util.py @@ -28,6 +28,7 @@ try: except ImportError: getgrnam = None + def _get_gid(name): """Returns a gid, given a group name.""" if getgrnam is None or name is None: @@ -40,6 +41,7 @@ def _get_gid(name): return result[2] return None + def _get_uid(name): """Returns an uid, given a user name.""" if getpwnam is None or name is None: @@ -52,8 +54,10 @@ def _get_uid(name): return result[2] return None -def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, - owner=None, group=None): + +def make_tarball( + base_name, base_dir, compress="gzip", verbose=0, dry_run=0, owner=None, group=None +): """Create a (possibly compressed) tar file from all the files under 'base_dir'. @@ -69,16 +73,21 @@ def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, Returns the output filename. """ - tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', 'xz': 'xz', None: '', - 'compress': ''} - compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', - 'compress': '.Z'} + tar_compression = { + 'gzip': 'gz', + 'bzip2': 'bz2', + 'xz': 'xz', + None: '', + 'compress': '', + } + compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', 'compress': '.Z'} # flags for compression program, each element of list will be an argument if compress is not None and compress not in compress_ext.keys(): raise ValueError( - "bad value for 'compress': must be None, 'gzip', 'bzip2', " - "'xz' or 'compress'") + "bad value for 'compress': must be None, 'gzip', 'bzip2', " + "'xz' or 'compress'" + ) archive_name = base_name + '.tar' if compress != 'compress': @@ -124,6 +133,7 @@ def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, return archive_name + def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): """Create a zip file from all the files under 'base_dir'. 
@@ -145,26 +155,29 @@ def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): zipoptions = "-rq" try: - spawn(["zip", zipoptions, zip_filename, base_dir], - dry_run=dry_run) + spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) except DistutilsExecError: # XXX really should distinguish between "couldn't find # external 'zip' command" and "zip failed". - raise DistutilsExecError(("unable to create zip file '%s': " - "could neither import the 'zipfile' module nor " - "find a standalone zip utility") % zip_filename) + raise DistutilsExecError( + ( + "unable to create zip file '%s': " + "could neither import the 'zipfile' module nor " + "find a standalone zip utility" + ) + % zip_filename + ) else: - log.info("creating '%s' and adding '%s' to it", - zip_filename, base_dir) + log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) if not dry_run: try: - zip = zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_DEFLATED) + zip = zipfile.ZipFile( + zip_filename, "w", compression=zipfile.ZIP_DEFLATED + ) except RuntimeError: - zip = zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_STORED) + zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_STORED) with zip: if base_dir != os.curdir: @@ -184,14 +197,16 @@ def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): return zip_filename + ARCHIVE_FORMATS = { 'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), 'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), 'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"), - 'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"), - 'tar': (make_tarball, [('compress', None)], "uncompressed tar file"), - 'zip': (make_zipfile, [],"ZIP file") - } + 'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"), + 'tar': (make_tarball, [('compress', None)], "uncompressed tar file"), + 'zip': (make_zipfile, [], "ZIP file"), +} + def check_archive_formats(formats): """Returns the first format from the 'format' list that is unknown. @@ -203,8 +218,17 @@ def check_archive_formats(formats): return format return None -def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, - dry_run=0, owner=None, group=None): + +def make_archive( + base_name, + format, + root_dir=None, + base_dir=None, + verbose=0, + dry_run=0, + owner=None, + group=None, +): """Create an archive file (eg. zip or tar). 'base_name' is the name of the file to create, minus any format-specific diff --git a/setuptools/_distutils/bcppcompiler.py b/setuptools/_distutils/bcppcompiler.py index 2eb6d2e..056b2d9 100644 --- a/setuptools/_distutils/bcppcompiler.py +++ b/setuptools/_distutils/bcppcompiler.py @@ -13,16 +13,20 @@ for the Borland C++ compiler. import os -from distutils.errors import \ - DistutilsExecError, \ - CompileError, LibError, LinkError, UnknownFileError -from distutils.ccompiler import \ - CCompiler, gen_preprocess_options +from distutils.errors import ( + DistutilsExecError, + CompileError, + LibError, + LinkError, + UnknownFileError, +) +from distutils.ccompiler import CCompiler, gen_preprocess_options from distutils.file_util import write_file from distutils.dep_util import newer from distutils import log -class BCPPCompiler(CCompiler) : + +class BCPPCompiler(CCompiler): """Concrete class that implements an interface to the Borland C/C++ compiler, as defined by the CCompiler abstract class. 
""" @@ -49,11 +53,7 @@ class BCPPCompiler(CCompiler) : static_lib_format = shared_lib_format = '%s%s' exe_extension = '.exe' - - def __init__ (self, - verbose=0, - dry_run=0, - force=0): + def __init__(self, verbose=0, dry_run=0, force=0): super().__init__(verbose, dry_run, force) @@ -73,24 +73,31 @@ class BCPPCompiler(CCompiler) : self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x'] self.ldflags_static = [] self.ldflags_exe = ['/Gn', '/q', '/x'] - self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r'] - + self.ldflags_exe_debug = ['/Gn', '/q', '/x', '/r'] # -- Worker methods ------------------------------------------------ - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): - - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) + def compile( + self, + sources, + output_dir=None, + macros=None, + include_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + depends=None, + ): + + macros, objects, extra_postargs, pp_opts, build = self._setup_compile( + output_dir, macros, include_dirs, sources, depends, extra_postargs + ) compile_opts = extra_preargs or [] - compile_opts.append ('-c') + compile_opts.append('-c') if debug: - compile_opts.extend (self.compile_options_debug) + compile_opts.extend(self.compile_options_debug) else: - compile_opts.extend (self.compile_options) + compile_opts.extend(self.compile_options) for obj in objects: try: @@ -106,14 +113,14 @@ class BCPPCompiler(CCompiler) : if ext == '.res': # This is already a binary file -- skip it. - continue # the 'for' loop + continue # the 'for' loop if ext == '.rc': # This needs to be compiled to a .res file -- do it now. try: - self.spawn (["brcc32", "-fo", obj, src]) + self.spawn(["brcc32", "-fo", obj, src]) except DistutilsExecError as msg: raise CompileError(msg) - continue # the 'for' loop + continue # the 'for' loop # The next two are both for the real compiler. if ext in self._c_extensions: @@ -132,9 +139,14 @@ class BCPPCompiler(CCompiler) : # Note that the source file names must appear at the end of # the command line. try: - self.spawn ([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs + [src]) + self.spawn( + [self.cc] + + compile_opts + + pp_opts + + [input_opt, output_opt] + + extra_postargs + + [src] + ) except DistutilsExecError as msg: raise CompileError(msg) @@ -142,24 +154,19 @@ class BCPPCompiler(CCompiler) : # compile () + def create_static_lib( + self, objects, output_libname, output_dir=None, debug=0, target_lang=None + ): - def create_static_lib (self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): - - (objects, output_dir) = self._fix_object_args (objects, output_dir) - output_filename = \ - self.library_filename (output_libname, output_dir=output_dir) + (objects, output_dir) = self._fix_object_args(objects, output_dir) + output_filename = self.library_filename(output_libname, output_dir=output_dir) - if self._need_link (objects, output_filename): + if self._need_link(objects, output_filename): lib_args = [output_filename, '/u'] + objects if debug: - pass # XXX what goes here? + pass # XXX what goes here? 
try: - self.spawn ([self.lib] + lib_args) + self.spawn([self.lib] + lib_args) except DistutilsExecError as msg: raise LibError(msg) else: @@ -167,37 +174,41 @@ class BCPPCompiler(CCompiler) : # create_static_lib () - - def link (self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): + def link( + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): # XXX this ignores 'build_temp'! should follow the lead of # msvccompiler.py - (objects, output_dir) = self._fix_object_args (objects, output_dir) - (libraries, library_dirs, runtime_library_dirs) = \ - self._fix_lib_args (libraries, library_dirs, runtime_library_dirs) + (objects, output_dir) = self._fix_object_args(objects, output_dir) + (libraries, library_dirs, runtime_library_dirs) = self._fix_lib_args( + libraries, library_dirs, runtime_library_dirs + ) if runtime_library_dirs: - log.warn("I don't know what to do with 'runtime_library_dirs': %s", - str(runtime_library_dirs)) + log.warn( + "I don't know what to do with 'runtime_library_dirs': %s", + str(runtime_library_dirs), + ) if output_dir is not None: - output_filename = os.path.join (output_dir, output_filename) + output_filename = os.path.join(output_dir, output_filename) - if self._need_link (objects, output_filename): + if self._need_link(objects, output_filename): # Figure out linker args based on type of target. if target_desc == CCompiler.EXECUTABLE: @@ -213,20 +224,18 @@ class BCPPCompiler(CCompiler) : else: ld_args = self.ldflags_shared[:] - # Create a temporary exports file for use by the linker if export_symbols is None: def_file = '' else: - head, tail = os.path.split (output_filename) - modname, ext = os.path.splitext (tail) - temp_dir = os.path.dirname(objects[0]) # preserve tree structure - def_file = os.path.join (temp_dir, '%s.def' % modname) + head, tail = os.path.split(output_filename) + modname, ext = os.path.splitext(tail) + temp_dir = os.path.dirname(objects[0]) # preserve tree structure + def_file = os.path.join(temp_dir, '%s.def' % modname) contents = ['EXPORTS'] - for sym in (export_symbols or []): + for sym in export_symbols or []: contents.append(' %s=_%s' % (sym, sym)) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) + self.execute(write_file, (def_file, contents), "writing %s" % def_file) # Borland C++ has problems with '/' in paths objects2 = map(os.path.normpath, objects) @@ -241,10 +250,9 @@ class BCPPCompiler(CCompiler) : else: objects.append(file) - for l in library_dirs: ld_args.append("/L%s" % os.path.normpath(l)) - ld_args.append("/L.") # we sometimes use relative paths + ld_args.append("/L.") # we sometimes use relative paths # list of object files ld_args.extend(objects) @@ -260,7 +268,7 @@ class BCPPCompiler(CCompiler) : # them. Arghghh!. Apparently it works fine as coded... 
# name of dll/exe file - ld_args.extend([',',output_filename]) + ld_args.extend([',', output_filename]) # no map file and start libraries ld_args.append(',,') @@ -276,24 +284,23 @@ class BCPPCompiler(CCompiler) : ld_args.append(libfile) # some default libraries - ld_args.append ('import32') - ld_args.append ('cw32mt') + ld_args.append('import32') + ld_args.append('cw32mt') # def file for export symbols - ld_args.extend([',',def_file]) + ld_args.extend([',', def_file]) # add resource files ld_args.append(',') ld_args.extend(resources) - if extra_preargs: ld_args[:0] = extra_preargs if extra_postargs: ld_args.extend(extra_postargs) - self.mkpath (os.path.dirname (output_filename)) + self.mkpath(os.path.dirname(output_filename)) try: - self.spawn ([self.linker] + ld_args) + self.spawn([self.linker] + ld_args) except DistutilsExecError as msg: raise LinkError(msg) @@ -304,8 +311,7 @@ class BCPPCompiler(CCompiler) : # -- Miscellaneous methods ----------------------------------------- - - def find_library_file (self, dirs, lib, debug=0): + def find_library_file(self, dirs, lib, debug=0): # List of effective library names to try, in order of preference: # xxx_bcpp.lib is better than xxx.lib # and xxx_d.lib is better than xxx.lib if debug is set @@ -316,7 +322,7 @@ class BCPPCompiler(CCompiler) : # compiler they care about, since (almost?) every Windows compiler # seems to have a different format for static libraries. if debug: - dlib = (lib + "_d") + dlib = lib + "_d" try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib) else: try_names = (lib + "_bcpp", lib) @@ -331,43 +337,42 @@ class BCPPCompiler(CCompiler) : return None # overwrite the one from CCompiler to support rc and res-files - def object_filenames (self, - source_filenames, - strip_dir=0, - output_dir=''): - if output_dir is None: output_dir = '' + def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): + if output_dir is None: + output_dir = '' obj_names = [] for src_name in source_filenames: # use normcase to make sure '.rc' is really '.rc' and not '.RC' - (base, ext) = os.path.splitext (os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % \ - (ext, src_name)) + (base, ext) = os.path.splitext(os.path.normcase(src_name)) + if ext not in (self.src_extensions + ['.rc', '.res']): + raise UnknownFileError( + "unknown file type '%s' (from '%s')" % (ext, src_name) + ) if strip_dir: - base = os.path.basename (base) + base = os.path.basename(base) if ext == '.res': # these can go unchanged - obj_names.append (os.path.join (output_dir, base + ext)) + obj_names.append(os.path.join(output_dir, base + ext)) elif ext == '.rc': # these need to be compiled to .res-files - obj_names.append (os.path.join (output_dir, base + '.res')) + obj_names.append(os.path.join(output_dir, base + '.res')) else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) + obj_names.append(os.path.join(output_dir, base + self.obj_extension)) return obj_names # object_filenames () - def preprocess (self, - source, - output_file=None, - macros=None, - include_dirs=None, - extra_preargs=None, - extra_postargs=None): - - (_, macros, include_dirs) = \ - self._fix_compile_args(None, macros, include_dirs) + def preprocess( + self, + source, + output_file=None, + macros=None, + include_dirs=None, + extra_preargs=None, + extra_postargs=None, + ): + + (_, macros, include_dirs) = self._fix_compile_args(None, macros, include_dirs) pp_opts = 
gen_preprocess_options(macros, include_dirs) pp_args = ['cpp32.exe'] + pp_opts if output_file is not None: diff --git a/setuptools/_distutils/ccompiler.py b/setuptools/_distutils/ccompiler.py index 777fc66..005b64a 100644 --- a/setuptools/_distutils/ccompiler.py +++ b/setuptools/_distutils/ccompiler.py @@ -12,6 +12,7 @@ from distutils.dep_util import newer_group from distutils.util import split_quoted, execute from distutils import log + class CCompiler: """Abstract base class to define the interface that must be implemented by real compiler classes. Also has some utility methods used by @@ -56,17 +57,16 @@ class CCompiler: # think this is useless without the ability to null out the # library search path anyways. - # Subclasses that rely on the standard filename generation methods # implemented below should override these; see the comment near # those methods ('object_filenames()' et. al.) for details: - src_extensions = None # list of strings - obj_extension = None # string + src_extensions = None # list of strings + obj_extension = None # string static_lib_extension = None - shared_lib_extension = None # string - static_lib_format = None # format string - shared_lib_format = None # prob. same as static_lib_format - exe_extension = None # string + shared_lib_extension = None # string + static_lib_format = None # format string + shared_lib_format = None # prob. same as static_lib_format + exe_extension = None # string # Default language settings. language_map is used to detect a source # file or Extension target language, checking source filenames. @@ -74,12 +74,13 @@ class CCompiler: # what language to use when mixing source types. For example, if some # extension has two files with ".c" extension, and one with ".cpp", it # is still linked as c++. - language_map = {".c" : "c", - ".cc" : "c++", - ".cpp" : "c++", - ".cxx" : "c++", - ".m" : "objc", - } + language_map = { + ".c": "c", + ".cc": "c++", + ".cpp": "c++", + ".cxx": "c++", + ".m": "objc", + } language_order = ["c++", "objc", "c"] def __init__(self, verbose=0, dry_run=0, force=0): @@ -146,8 +147,10 @@ class CCompiler: for key in kwargs: if key not in self.executables: - raise ValueError("unknown executable '%s' for class %s" % - (key, self.__class__.__name__)) + raise ValueError( + "unknown executable '%s' for class %s" + % (key, self.__class__.__name__) + ) self.set_executable(key, kwargs[key]) def set_executable(self, key, value): @@ -170,14 +173,19 @@ class CCompiler: nothing if all definitions are OK, raise TypeError otherwise. """ for defn in definitions: - if not (isinstance(defn, tuple) and - (len(defn) in (1, 2) and - (isinstance (defn[1], str) or defn[1] is None)) and - isinstance (defn[0], str)): - raise TypeError(("invalid macro definition '%s': " % defn) + \ - "must be tuple (string,), (string, string), or " + \ - "(string, None)") - + if not ( + isinstance(defn, tuple) + and ( + len(defn) in (1, 2) + and (isinstance(defn[1], str) or defn[1] is None) + ) + and isinstance(defn[0], str) + ): + raise TypeError( + ("invalid macro definition '%s': " % defn) + + "must be tuple (string,), (string, string), or " + + "(string, None)" + ) # -- Bookkeeping methods ------------------------------------------- @@ -190,7 +198,7 @@ class CCompiler: """ # Delete from the list of macro definitions/undefinitions if # already there (so that this one will take precedence). 
- i = self._find_macro (name) + i = self._find_macro(name) if i is not None: del self.macros[i] @@ -207,7 +215,7 @@ class CCompiler: """ # Delete from the list of macro definitions/undefinitions if # already there (so that this one will take precedence). - i = self._find_macro (name) + i = self._find_macro(name) if i is not None: del self.macros[i] @@ -301,14 +309,12 @@ class CCompiler: """ self.objects = objects[:] - # -- Private utility methods -------------------------------------- # (here for the convenience of subclasses) # Helper method to prep compiler in subclass compile() methods - def _setup_compile(self, outdir, macros, incdirs, sources, depends, - extra): + def _setup_compile(self, outdir, macros, incdirs, sources, depends, extra): """Process arguments and decide which source files to compile.""" if outdir is None: outdir = self.output_dir @@ -327,15 +333,13 @@ class CCompiler: elif isinstance(incdirs, (list, tuple)): incdirs = list(incdirs) + (self.include_dirs or []) else: - raise TypeError( - "'include_dirs' (if supplied) must be a list of strings") + raise TypeError("'include_dirs' (if supplied) must be a list of strings") if extra is None: extra = [] # Get the list of expected output (object) files - objects = self.object_filenames(sources, strip_dir=0, - output_dir=outdir) + objects = self.object_filenames(sources, strip_dir=0, output_dir=outdir) assert len(objects) == len(sources) pp_opts = gen_preprocess_options(macros, incdirs) @@ -386,8 +390,7 @@ class CCompiler: elif isinstance(include_dirs, (list, tuple)): include_dirs = list(include_dirs) + (self.include_dirs or []) else: - raise TypeError( - "'include_dirs' (if supplied) must be a list of strings") + raise TypeError("'include_dirs' (if supplied) must be a list of strings") return output_dir, macros, include_dirs @@ -434,27 +437,27 @@ class CCompiler: if libraries is None: libraries = self.libraries elif isinstance(libraries, (list, tuple)): - libraries = list (libraries) + (self.libraries or []) + libraries = list(libraries) + (self.libraries or []) else: - raise TypeError( - "'libraries' (if supplied) must be a list of strings") + raise TypeError("'libraries' (if supplied) must be a list of strings") if library_dirs is None: library_dirs = self.library_dirs elif isinstance(library_dirs, (list, tuple)): - library_dirs = list (library_dirs) + (self.library_dirs or []) + library_dirs = list(library_dirs) + (self.library_dirs or []) else: - raise TypeError( - "'library_dirs' (if supplied) must be a list of strings") + raise TypeError("'library_dirs' (if supplied) must be a list of strings") if runtime_library_dirs is None: runtime_library_dirs = self.runtime_library_dirs elif isinstance(runtime_library_dirs, (list, tuple)): - runtime_library_dirs = (list(runtime_library_dirs) + - (self.runtime_library_dirs or [])) + runtime_library_dirs = list(runtime_library_dirs) + ( + self.runtime_library_dirs or [] + ) else: - raise TypeError("'runtime_library_dirs' (if supplied) " - "must be a list of strings") + raise TypeError( + "'runtime_library_dirs' (if supplied) " "must be a list of strings" + ) return (libraries, library_dirs, runtime_library_dirs) @@ -466,9 +469,9 @@ class CCompiler: return True else: if self.dry_run: - newer = newer_group (objects, output_file, missing='newer') + newer = newer_group(objects, output_file, missing='newer') else: - newer = newer_group (objects, output_file) + newer = newer_group(objects, output_file) return newer def detect_language(self, sources): @@ -491,12 +494,18 @@ class CCompiler: 
pass return lang - # -- Worker methods ------------------------------------------------ # (must be implemented by subclasses) - def preprocess(self, source, output_file=None, macros=None, - include_dirs=None, extra_preargs=None, extra_postargs=None): + def preprocess( + self, + source, + output_file=None, + macros=None, + include_dirs=None, + extra_preargs=None, + extra_postargs=None, + ): """Preprocess a single C/C++ source file, named in 'source'. Output will be written to file named 'output_file', or stdout if 'output_file' not supplied. 'macros' is a list of macro @@ -508,9 +517,17 @@ class CCompiler: """ pass - def compile(self, sources, output_dir=None, macros=None, - include_dirs=None, debug=0, extra_preargs=None, - extra_postargs=None, depends=None): + def compile( + self, + sources, + output_dir=None, + macros=None, + include_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + depends=None, + ): """Compile one or more source files. 'sources' must be a list of filenames, most likely C/C++ @@ -561,9 +578,9 @@ class CCompiler: """ # A concrete compiler class can either override this method # entirely or implement _compile(). - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) + macros, objects, extra_postargs, pp_opts, build = self._setup_compile( + output_dir, macros, include_dirs, sources, depends, extra_postargs + ) cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) for obj in objects: @@ -582,8 +599,9 @@ class CCompiler: # should implement _compile(). pass - def create_static_lib(self, objects, output_libname, output_dir=None, - debug=0, target_lang=None): + def create_static_lib( + self, objects, output_libname, output_dir=None, debug=0, target_lang=None + ): """Link a bunch of stuff together to create a static library file. The "bunch of stuff" consists of the list of object files supplied as 'objects', the extra object files supplied to @@ -608,26 +626,27 @@ class CCompiler: """ pass - # values for target_desc parameter in link() SHARED_OBJECT = "shared_object" SHARED_LIBRARY = "shared_library" EXECUTABLE = "executable" - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): + def link( + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): """Link a bunch of stuff together to create an executable or shared library file. @@ -673,66 +692,98 @@ class CCompiler: """ raise NotImplementedError - # Old 'link_*()' methods, rewritten to use the new 'link()' method. 
- def link_shared_lib(self, - objects, - output_libname, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - self.link(CCompiler.SHARED_LIBRARY, objects, - self.library_filename(output_libname, lib_type='shared'), - output_dir, - libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, - extra_preargs, extra_postargs, build_temp, target_lang) - - - def link_shared_object(self, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): - self.link(CCompiler.SHARED_OBJECT, objects, - output_filename, output_dir, - libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, - extra_preargs, extra_postargs, build_temp, target_lang) - - - def link_executable(self, - objects, - output_progname, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - target_lang=None): - self.link(CCompiler.EXECUTABLE, objects, - self.executable_filename(output_progname), output_dir, - libraries, library_dirs, runtime_library_dirs, None, - debug, extra_preargs, extra_postargs, None, target_lang) - + def link_shared_lib( + self, + objects, + output_libname, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): + self.link( + CCompiler.SHARED_LIBRARY, + objects, + self.library_filename(output_libname, lib_type='shared'), + output_dir, + libraries, + library_dirs, + runtime_library_dirs, + export_symbols, + debug, + extra_preargs, + extra_postargs, + build_temp, + target_lang, + ) + + def link_shared_object( + self, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): + self.link( + CCompiler.SHARED_OBJECT, + objects, + output_filename, + output_dir, + libraries, + library_dirs, + runtime_library_dirs, + export_symbols, + debug, + extra_preargs, + extra_postargs, + build_temp, + target_lang, + ) + + def link_executable( + self, + objects, + output_progname, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + target_lang=None, + ): + self.link( + CCompiler.EXECUTABLE, + objects, + self.executable_filename(output_progname), + output_dir, + libraries, + library_dirs, + runtime_library_dirs, + None, + debug, + extra_preargs, + extra_postargs, + None, + target_lang, + ) # -- Miscellaneous methods ----------------------------------------- # These are all used by the 'gen_lib_options() function; there is @@ -757,8 +808,14 @@ class CCompiler: """ raise NotImplementedError - def has_function(self, funcname, includes=None, include_dirs=None, - libraries=None, library_dirs=None): + def has_function( + self, + funcname, + includes=None, + include_dirs=None, + libraries=None, + library_dirs=None, + ): """Return a boolean indicating whether funcname is supported on the current platform. The optional arguments can be used to augment the compilation environment. 
@@ -767,6 +824,7 @@ class CCompiler: # import math which might not be available at that point - maybe # the necessary logic should just be inlined? import tempfile + if includes is None: includes = [] if include_dirs is None: @@ -780,12 +838,15 @@ class CCompiler: try: for incl in includes: f.write("""#include "%s"\n""" % incl) - f.write("""\ + f.write( + """\ int main (int argc, char **argv) { %s(); return 0; } -""" % funcname) +""" + % funcname + ) finally: f.close() try: @@ -796,9 +857,9 @@ int main (int argc, char **argv) { os.remove(fname) try: - self.link_executable(objects, "a.out", - libraries=libraries, - library_dirs=library_dirs) + self.link_executable( + objects, "a.out", libraries=libraries, library_dirs=library_dirs + ) except (LinkError, TypeError): return False else: @@ -808,7 +869,7 @@ int main (int argc, char **argv) { os.remove(fn) return True - def find_library_file (self, dirs, lib, debug=0): + def find_library_file(self, dirs, lib, debug=0): """Search the specified list of directories for a static or shared library file 'lib' and return the full path to that file. If 'debug' true, look for a debugging version (if that makes sense on @@ -857,15 +918,15 @@ int main (int argc, char **argv) { obj_names = [] for src_name in source_filenames: base, ext = os.path.splitext(src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / + base = os.path.splitdrive(base)[1] # Chop off the drive + base = base[os.path.isabs(base) :] # If abs, chop off leading / if ext not in self.src_extensions: raise UnknownFileError( - "unknown file type '%s' (from '%s')" % (ext, src_name)) + "unknown file type '%s' (from '%s')" % (ext, src_name) + ) if strip_dir: base = os.path.basename(base) - obj_names.append(os.path.join(output_dir, - base + self.obj_extension)) + obj_names.append(os.path.join(output_dir, base + self.obj_extension)) return obj_names def shared_object_filename(self, basename, strip_dir=0, output_dir=''): @@ -880,12 +941,14 @@ int main (int argc, char **argv) { basename = os.path.basename(basename) return os.path.join(output_dir, basename + (self.exe_extension or '')) - def library_filename(self, libname, lib_type='static', # or 'shared' - strip_dir=0, output_dir=''): + def library_filename( + self, libname, lib_type='static', strip_dir=0, output_dir='' # or 'shared' + ): assert output_dir is not None if lib_type not in ("static", "shared", "dylib", "xcode_stub"): raise ValueError( - "'lib_type' must be \"static\", \"shared\", \"dylib\", or \"xcode_stub\"") + "'lib_type' must be \"static\", \"shared\", \"dylib\", or \"xcode_stub\"" + ) fmt = getattr(self, lib_type + "_lib_format") ext = getattr(self, lib_type + "_lib_extension") @@ -896,7 +959,6 @@ int main (int argc, char **argv) { return os.path.join(output_dir, dir, filename) - # -- Utility methods ----------------------------------------------- def announce(self, msg, level=1): @@ -904,6 +966,7 @@ int main (int argc, char **argv) { def debug_print(self, msg): from distutils.debug import DEBUG + if DEBUG: print(msg) @@ -919,7 +982,7 @@ int main (int argc, char **argv) { def move_file(self, src, dst): return move_file(src, dst, dry_run=self.dry_run) - def mkpath (self, name, mode=0o777): + def mkpath(self, name, mode=0o777): mkpath(name, mode, dry_run=self.dry_run) @@ -928,54 +991,59 @@ int main (int argc, char **argv) { # patterns. Order is important; platform mappings are preferred over # OS names. 
_default_compilers = ( - # Platform string mappings - # on a cygwin built python we can use gcc like an ordinary UNIXish # compiler ('cygwin.*', 'unix'), - # OS name mappings ('posix', 'unix'), ('nt', 'msvc'), +) - ) def get_default_compiler(osname=None, platform=None): """Determine the default compiler to use for the given platform. - osname should be one of the standard Python OS names (i.e. the - ones returned by os.name) and platform the common value - returned by sys.platform for the platform in question. + osname should be one of the standard Python OS names (i.e. the + ones returned by os.name) and platform the common value + returned by sys.platform for the platform in question. - The default values are os.name and sys.platform in case the - parameters are not given. + The default values are os.name and sys.platform in case the + parameters are not given. """ if osname is None: osname = os.name if platform is None: platform = sys.platform for pattern, compiler in _default_compilers: - if re.match(pattern, platform) is not None or \ - re.match(pattern, osname) is not None: + if ( + re.match(pattern, platform) is not None + or re.match(pattern, osname) is not None + ): return compiler # Default to Unix compiler return 'unix' + # Map compiler types to (module_name, class_name) pairs -- ie. where to # find the code that implements an interface to this compiler. (The module # is assumed to be in the 'distutils' package.) -compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler', - "standard UNIX-style compiler"), - 'msvc': ('_msvccompiler', 'MSVCCompiler', - "Microsoft Visual C++"), - 'cygwin': ('cygwinccompiler', 'CygwinCCompiler', - "Cygwin port of GNU C Compiler for Win32"), - 'mingw32': ('cygwinccompiler', 'Mingw32CCompiler', - "Mingw32 port of GNU C Compiler for Win32"), - 'bcpp': ('bcppcompiler', 'BCPPCompiler', - "Borland C++ Compiler"), - } +compiler_class = { + 'unix': ('unixccompiler', 'UnixCCompiler', "standard UNIX-style compiler"), + 'msvc': ('_msvccompiler', 'MSVCCompiler', "Microsoft Visual C++"), + 'cygwin': ( + 'cygwinccompiler', + 'CygwinCCompiler', + "Cygwin port of GNU C Compiler for Win32", + ), + 'mingw32': ( + 'cygwinccompiler', + 'Mingw32CCompiler', + "Mingw32 port of GNU C Compiler for Win32", + ), + 'bcpp': ('bcppcompiler', 'BCPPCompiler', "Borland C++ Compiler"), +} + def show_compilers(): """Print list of available compilers (used by the "--help-compiler" @@ -985,10 +1053,10 @@ def show_compilers(): # "--compiler", which just happens to be the case for the three # commands that use it. from distutils.fancy_getopt import FancyGetopt + compilers = [] for compiler in compiler_class.keys(): - compilers.append(("compiler="+compiler, None, - compiler_class[compiler][2])) + compilers.append(("compiler=" + compiler, None, compiler_class[compiler][2])) compilers.sort() pretty_printer = FancyGetopt(compilers) pretty_printer.print_help("List of available compilers:") @@ -1021,17 +1089,18 @@ def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0): try: module_name = "distutils." 
+ module_name - __import__ (module_name) + __import__(module_name) module = sys.modules[module_name] klass = vars(module)[class_name] except ImportError: raise DistutilsModuleError( - "can't compile C/C++ code: unable to load module '%s'" % \ - module_name) + "can't compile C/C++ code: unable to load module '%s'" % module_name + ) except KeyError: raise DistutilsModuleError( - "can't compile C/C++ code: unable to find class '%s' " - "in module '%s'" % (class_name, module_name)) + "can't compile C/C++ code: unable to find class '%s' " + "in module '%s'" % (class_name, module_name) + ) # XXX The None is necessary to preserve backwards compatibility # with classes that expect verbose to be the first positional @@ -1064,14 +1133,14 @@ def gen_preprocess_options(macros, include_dirs): for macro in macros: if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2): raise TypeError( - "bad macro definition '%s': " - "each element of 'macros' list must be a 1- or 2-tuple" - % macro) + "bad macro definition '%s': " + "each element of 'macros' list must be a 1- or 2-tuple" % macro + ) - if len(macro) == 1: # undefine this macro + if len(macro) == 1: # undefine this macro pp_opts.append("-U%s" % macro[0]) elif len(macro) == 2: - if macro[1] is None: # define with no explicit value + if macro[1] is None: # define with no explicit value pp_opts.append("-D%s" % macro[0]) else: # XXX *don't* need to be clever about quoting the @@ -1084,7 +1153,7 @@ def gen_preprocess_options(macros, include_dirs): return pp_opts -def gen_lib_options (compiler, library_dirs, runtime_library_dirs, libraries): +def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries): """Generate linker options for searching library directories and linking with specific libraries. 'libraries' and 'library_dirs' are, respectively, lists of library names (not filenames!) and search @@ -1116,8 +1185,9 @@ def gen_lib_options (compiler, library_dirs, runtime_library_dirs, libraries): if lib_file: lib_opts.append(lib_file) else: - compiler.warn("no library file corresponding to " - "'%s' found (skipping)" % lib) + compiler.warn( + "no library file corresponding to " "'%s' found (skipping)" % lib + ) else: - lib_opts.append(compiler.library_option (lib)) + lib_opts.append(compiler.library_option(lib)) return lib_opts diff --git a/setuptools/_distutils/cmd.py b/setuptools/_distutils/cmd.py index dba3191..4a9bcc2 100644 --- a/setuptools/_distutils/cmd.py +++ b/setuptools/_distutils/cmd.py @@ -9,6 +9,7 @@ from distutils.errors import DistutilsOptionError from distutils import util, dir_util, file_util, archive_util, dep_util from distutils import log + class Command: """Abstract base class for defining command classes, the "worker bees" of the Distutils. A useful analogy for command classes is to think of @@ -41,7 +42,6 @@ class Command: # defined. The canonical example is the "install" command. sub_commands = [] - # -- Creation/initialization methods ------------------------------- def __init__(self, dist): @@ -130,8 +130,9 @@ class Command: This method must be implemented by all command classes. """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) + raise RuntimeError( + "abstract method -- subclass %s must override" % self.__class__ + ) def finalize_options(self): """Set final values for all the options that this command supports. @@ -144,12 +145,13 @@ class Command: This method must be implemented by all command classes. 
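# Illustrative sketch, not part of the patch: the macro-tuple convention
# enforced by gen_preprocess_options() above -- 2-tuples define (-D, value
# optional), 1-tuples undefine (-U), and include directories become -I options.
from distutils.ccompiler import gen_preprocess_options

pp_opts = gen_preprocess_options(
    [("NDEBUG", None), ("VERSION", '"1.0"'), ("OLD_FLAG",)],
    ["include"],
)
print(pp_opts)  # ['-DNDEBUG', '-DVERSION="1.0"', '-UOLD_FLAG', '-Iinclude']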
""" - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) - + raise RuntimeError( + "abstract method -- subclass %s must override" % self.__class__ + ) def dump_options(self, header=None, indent=""): from distutils.fancy_getopt import longopt_xlate + if header is None: header = "command options for '%s':" % self.get_command_name() self.announce(indent + header, level=log.INFO) @@ -159,8 +161,7 @@ class Command: if option[-1] == "=": option = option[:-1] value = getattr(self, option) - self.announce(indent + "%s = %s" % (option, value), - level=log.INFO) + self.announce(indent + "%s = %s" % (option, value), level=log.INFO) def run(self): """A command's raison d'etre: carry out the action it exists to @@ -172,8 +173,9 @@ class Command: This method must be implemented by all command classes. """ - raise RuntimeError("abstract method -- subclass %s must override" - % self.__class__) + raise RuntimeError( + "abstract method -- subclass %s must override" % self.__class__ + ) def announce(self, msg, level=1): """If the current verbosity level is of greater than or equal to @@ -186,11 +188,11 @@ class Command: DISTUTILS_DEBUG environment variable) flag is true. """ from distutils.debug import DEBUG + if DEBUG: print(msg) sys.stdout.flush() - # -- Option validation methods ------------------------------------- # (these are very handy in writing the 'finalize_options()' method) # @@ -210,8 +212,9 @@ class Command: setattr(self, option, default) return default elif not isinstance(val, str): - raise DistutilsOptionError("'%s' must be a %s (got `%s`)" - % (option, what, val)) + raise DistutilsOptionError( + "'%s' must be a %s (got `%s`)" % (option, what, val) + ) return val def ensure_string(self, option, default=None): @@ -238,27 +241,29 @@ class Command: ok = False if not ok: raise DistutilsOptionError( - "'%s' must be a list of strings (got %r)" - % (option, val)) + "'%s' must be a list of strings (got %r)" % (option, val) + ) - def _ensure_tested_string(self, option, tester, what, error_fmt, - default=None): + def _ensure_tested_string(self, option, tester, what, error_fmt, default=None): val = self._ensure_stringlike(option, what, default) if val is not None and not tester(val): - raise DistutilsOptionError(("error in '%s' option: " + error_fmt) - % (option, val)) + raise DistutilsOptionError( + ("error in '%s' option: " + error_fmt) % (option, val) + ) def ensure_filename(self, option): """Ensure that 'option' is the name of an existing file.""" - self._ensure_tested_string(option, os.path.isfile, - "filename", - "'%s' does not exist or is not a file") + self._ensure_tested_string( + option, os.path.isfile, "filename", "'%s' does not exist or is not a file" + ) def ensure_dirname(self, option): - self._ensure_tested_string(option, os.path.isdir, - "directory name", - "'%s' does not exist or is not a directory") - + self._ensure_tested_string( + option, + os.path.isdir, + "directory name", + "'%s' does not exist or is not a directory", + ) # -- Convenience methods for commands ------------------------------ @@ -302,8 +307,7 @@ class Command: # XXX rename to 'get_reinitialized_command()'? 
(should do the # same in dist.py, if so) def reinitialize_command(self, command, reinit_subcommands=0): - return self.distribution.reinitialize_command(command, - reinit_subcommands) + return self.distribution.reinitialize_command(command, reinit_subcommands) def run_command(self, command): """Run some other command: uses the 'run_command()' method of @@ -325,7 +329,6 @@ class Command: commands.append(cmd_name) return commands - # -- External world manipulation ----------------------------------- def warn(self, msg): @@ -337,41 +340,70 @@ class Command: def mkpath(self, name, mode=0o777): dir_util.mkpath(name, mode, dry_run=self.dry_run) - def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1, - link=None, level=1): + def copy_file( + self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1 + ): """Copy a file respecting verbose, dry-run and force flags. (The former two default to whatever is in the Distribution object, and the latter defaults to false for commands that don't define it.)""" - return file_util.copy_file(infile, outfile, preserve_mode, - preserve_times, not self.force, link, - dry_run=self.dry_run) - - def copy_tree(self, infile, outfile, preserve_mode=1, preserve_times=1, - preserve_symlinks=0, level=1): + return file_util.copy_file( + infile, + outfile, + preserve_mode, + preserve_times, + not self.force, + link, + dry_run=self.dry_run, + ) + + def copy_tree( + self, + infile, + outfile, + preserve_mode=1, + preserve_times=1, + preserve_symlinks=0, + level=1, + ): """Copy an entire directory tree respecting verbose, dry-run, and force flags. """ - return dir_util.copy_tree(infile, outfile, preserve_mode, - preserve_times, preserve_symlinks, - not self.force, dry_run=self.dry_run) - - def move_file (self, src, dst, level=1): + return dir_util.copy_tree( + infile, + outfile, + preserve_mode, + preserve_times, + preserve_symlinks, + not self.force, + dry_run=self.dry_run, + ) + + def move_file(self, src, dst, level=1): """Move a file respecting dry-run flag.""" return file_util.move_file(src, dst, dry_run=self.dry_run) def spawn(self, cmd, search_path=1, level=1): """Spawn an external command respecting dry-run flag.""" from distutils.spawn import spawn - spawn(cmd, search_path, dry_run=self.dry_run) - def make_archive(self, base_name, format, root_dir=None, base_dir=None, - owner=None, group=None): - return archive_util.make_archive(base_name, format, root_dir, base_dir, - dry_run=self.dry_run, - owner=owner, group=group) + spawn(cmd, search_path, dry_run=self.dry_run) - def make_file(self, infiles, outfile, func, args, - exec_msg=None, skip_msg=None, level=1): + def make_archive( + self, base_name, format, root_dir=None, base_dir=None, owner=None, group=None + ): + return archive_util.make_archive( + base_name, + format, + root_dir, + base_dir, + dry_run=self.dry_run, + owner=owner, + group=group, + ) + + def make_file( + self, infiles, outfile, func, args, exec_msg=None, skip_msg=None, level=1 + ): """Special case of 'execute()' for operations that process one or more input files and generate one output file. 
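# Illustrative sketch, not part of the patch: a minimal (hypothetical)
# command implementing the initialize_options/finalize_options/run contract
# described above and using the ensure_string_list() validation helper.
from distutils.cmd import Command

class print_labels(Command):
    description = "print a list of labels (illustrative only)"
    user_options = [("labels=", None, "comma- or space-separated labels")]

    def initialize_options(self):
        self.labels = None  # every option starts out unset ...

    def finalize_options(self):
        # ... and is normalized here; accepts "a,b c" or a list of strings.
        self.ensure_string_list("labels")

    def run(self):
        self.announce("labels: %r" % (self.labels,))

# Hooked up via setup(cmdclass={"print_labels": print_labels}, ...).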
Works just like 'execute()', except the operation is skipped and a different @@ -387,8 +419,7 @@ class Command: if isinstance(infiles, str): infiles = (infiles,) elif not isinstance(infiles, (list, tuple)): - raise TypeError( - "'infiles' must be a string, or a list or tuple of strings") + raise TypeError("'infiles' must be a string, or a list or tuple of strings") if exec_msg is None: exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles)) diff --git a/setuptools/_distutils/command/__init__.py b/setuptools/_distutils/command/__init__.py index 481eea9..d199c24 100644 --- a/setuptools/_distutils/command/__init__.py +++ b/setuptools/_distutils/command/__init__.py @@ -3,29 +3,30 @@ Package containing implementation of all the standard Distutils commands.""" -__all__ = ['build', - 'build_py', - 'build_ext', - 'build_clib', - 'build_scripts', - 'clean', - 'install', - 'install_lib', - 'install_headers', - 'install_scripts', - 'install_data', - 'sdist', - 'register', - 'bdist', - 'bdist_dumb', - 'bdist_rpm', - 'bdist_wininst', - 'check', - 'upload', - # These two are reserved for future use: - #'bdist_sdux', - #'bdist_pkgtool', - # Note: - # bdist_packager is not included because it only provides - # an abstract base class - ] +__all__ = [ + 'build', + 'build_py', + 'build_ext', + 'build_clib', + 'build_scripts', + 'clean', + 'install', + 'install_lib', + 'install_headers', + 'install_scripts', + 'install_data', + 'sdist', + 'register', + 'bdist', + 'bdist_dumb', + 'bdist_rpm', + 'bdist_wininst', + 'check', + 'upload', + # These two are reserved for future use: + #'bdist_sdux', + #'bdist_pkgtool', + # Note: + # bdist_packager is not included because it only provides + # an abstract base class +] diff --git a/setuptools/_distutils/command/bdist.py b/setuptools/_distutils/command/bdist.py index 014871d..2a63976 100644 --- a/setuptools/_distutils/command/bdist.py +++ b/setuptools/_distutils/command/bdist.py @@ -10,13 +10,12 @@ from distutils.util import get_platform def show_formats(): - """Print list of available formats (arguments to "--format" option). 
- """ + """Print list of available formats (arguments to "--format" option).""" from distutils.fancy_getopt import FancyGetopt + formats = [] for format in bdist.format_commands: - formats.append(("formats=" + format, None, - bdist.format_command[format][1])) + formats.append(("formats=" + format, None, bdist.format_command[format][1])) pretty_printer = FancyGetopt(formats) pretty_printer.print_help("List of available distribution formats:") @@ -25,58 +24,71 @@ class bdist(Command): description = "create a built (binary) distribution" - user_options = [('bdist-base=', 'b', - "temporary directory for creating built distributions"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('formats=', None, - "formats for distribution (comma-separated list)"), - ('dist-dir=', 'd', - "directory to put final built distributions in " - "[default: dist]"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ] + user_options = [ + ('bdist-base=', 'b', "temporary directory for creating built distributions"), + ( + 'plat-name=', + 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform(), + ), + ('formats=', None, "formats for distribution (comma-separated list)"), + ( + 'dist-dir=', + 'd', + "directory to put final built distributions in " "[default: dist]", + ), + ('skip-build', None, "skip rebuilding everything (for testing/debugging)"), + ( + 'owner=', + 'u', + "Owner name used when creating a tar file" " [default: current user]", + ), + ( + 'group=', + 'g', + "Group name used when creating a tar file" " [default: current group]", + ), + ] boolean_options = ['skip-build'] help_options = [ - ('help-formats', None, - "lists available distribution formats", show_formats), - ] + ('help-formats', None, "lists available distribution formats", show_formats), + ] # The following commands do not take a format option from bdist no_format_option = ('bdist_rpm',) # This won't do in reality: will need to distinguish RPM-ish Linux, # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS. - default_format = {'posix': 'gztar', - 'nt': 'zip'} + default_format = {'posix': 'gztar', 'nt': 'zip'} # Establish the preferred order (for the --help-formats option). - format_commands = ['rpm', 'gztar', 'bztar', 'xztar', 'ztar', 'tar', - 'wininst', 'zip', 'msi'] + format_commands = [ + 'rpm', + 'gztar', + 'bztar', + 'xztar', + 'ztar', + 'tar', + 'wininst', + 'zip', + 'msi', + ] # And the real information. 
- format_command = {'rpm': ('bdist_rpm', "RPM distribution"), - 'gztar': ('bdist_dumb', "gzip'ed tar file"), - 'bztar': ('bdist_dumb', "bzip2'ed tar file"), - 'xztar': ('bdist_dumb', "xz'ed tar file"), - 'ztar': ('bdist_dumb', "compressed tar file"), - 'tar': ('bdist_dumb', "tar file"), - 'wininst': ('bdist_wininst', - "Windows executable installer"), - 'zip': ('bdist_dumb', "ZIP file"), - 'msi': ('bdist_msi', "Microsoft Installer") - } - + format_command = { + 'rpm': ('bdist_rpm', "RPM distribution"), + 'gztar': ('bdist_dumb', "gzip'ed tar file"), + 'bztar': ('bdist_dumb', "bzip2'ed tar file"), + 'xztar': ('bdist_dumb', "xz'ed tar file"), + 'ztar': ('bdist_dumb', "compressed tar file"), + 'tar': ('bdist_dumb', "tar file"), + 'wininst': ('bdist_wininst', "Windows executable installer"), + 'zip': ('bdist_dumb', "ZIP file"), + 'msi': ('bdist_msi', "Microsoft Installer"), + } def initialize_options(self): self.bdist_base = None @@ -100,8 +112,7 @@ class bdist(Command): # "build/bdist./dumb", "build/bdist./rpm", etc.) if self.bdist_base is None: build_base = self.get_finalized_command('build').build_base - self.bdist_base = os.path.join(build_base, - 'bdist.' + self.plat_name) + self.bdist_base = os.path.join(build_base, 'bdist.' + self.plat_name) self.ensure_string_list('formats') if self.formats is None: @@ -109,8 +120,9 @@ class bdist(Command): self.formats = [self.default_format[os.name]] except KeyError: raise DistutilsPlatformError( - "don't know how to create built distributions " - "on platform %s" % os.name) + "don't know how to create built distributions " + "on platform %s" % os.name + ) if self.dist_dir is None: self.dist_dir = "dist" @@ -138,6 +150,6 @@ class bdist(Command): # If we're going to need to run this command again, tell it to # keep its temporary files around so subsequent runs go faster. 
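# Illustrative sketch, not part of the patch: the class-level tables
# reformatted above decide which sub-command implements each --formats value.
from distutils.command.bdist import bdist

print(bdist.default_format)          # {'posix': 'gztar', 'nt': 'zip'}
print(bdist.format_command["zip"])   # ('bdist_dumb', 'ZIP file')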
- if cmd_name in commands[i+1:]: + if cmd_name in commands[i + 1 :]: sub_cmd.keep_temp = 1 self.run_command(cmd_name) diff --git a/setuptools/_distutils/command/bdist_dumb.py b/setuptools/_distutils/command/bdist_dumb.py index f0d6b5b..3c38782 100644 --- a/setuptools/_distutils/command/bdist_dumb.py +++ b/setuptools/_distutils/command/bdist_dumb.py @@ -12,40 +12,52 @@ from distutils.errors import * from distutils.sysconfig import get_python_version from distutils import log + class bdist_dumb(Command): description = "create a \"dumb\" built distribution" - user_options = [('bdist-dir=', 'd', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('format=', 'f', - "archive format to create (tar, gztar, bztar, xztar, " - "ztar, zip)"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('relative', None, - "build the archive using relative paths " - "(default: false)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ] + user_options = [ + ('bdist-dir=', 'd', "temporary directory for creating the distribution"), + ( + 'plat-name=', + 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform(), + ), + ( + 'format=', + 'f', + "archive format to create (tar, gztar, bztar, xztar, " "ztar, zip)", + ), + ( + 'keep-temp', + 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive", + ), + ('dist-dir=', 'd', "directory to put final built distributions in"), + ('skip-build', None, "skip rebuilding everything (for testing/debugging)"), + ( + 'relative', + None, + "build the archive using relative paths " "(default: false)", + ), + ( + 'owner=', + 'u', + "Owner name used when creating a tar file" " [default: current user]", + ), + ( + 'group=', + 'g', + "Group name used when creating a tar file" " [default: current group]", + ), + ] boolean_options = ['keep-temp', 'skip-build', 'relative'] - default_format = { 'posix': 'gztar', - 'nt': 'zip' } + default_format = {'posix': 'gztar', 'nt': 'zip'} def initialize_options(self): self.bdist_dir = None @@ -68,13 +80,16 @@ class bdist_dumb(Command): self.format = self.default_format[os.name] except KeyError: raise DistutilsPlatformError( - "don't know how to create dumb built distributions " - "on platform %s" % os.name) + "don't know how to create dumb built distributions " + "on platform %s" % os.name + ) - self.set_undefined_options('bdist', - ('dist_dir', 'dist_dir'), - ('plat_name', 'plat_name'), - ('skip_build', 'skip_build')) + self.set_undefined_options( + 'bdist', + ('dist_dir', 'dist_dir'), + ('plat_name', 'plat_name'), + ('skip_build', 'skip_build'), + ) def run(self): if not self.skip_build: @@ -90,34 +105,38 @@ class bdist_dumb(Command): # And make an archive relative to the root of the # pseudo-installation tree. 
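# Illustrative sketch, not part of the patch: the archive step described in
# the comment above ultimately delegates to distutils.archive_util.make_archive()
# (via Command.make_archive); the paths below are invented.
from distutils.archive_util import make_archive

# Packs everything under root_dir into <base_name>.tar.gz and returns the path.
archive = make_archive(
    "dist/samplepkg-1.0.linux-x86_64",
    "gztar",
    root_dir="build/bdist.linux-x86_64/dumb",
)
print(archive)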
- archive_basename = "%s.%s" % (self.distribution.get_fullname(), - self.plat_name) + archive_basename = "%s.%s" % (self.distribution.get_fullname(), self.plat_name) pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) if not self.relative: archive_root = self.bdist_dir else: - if (self.distribution.has_ext_modules() and - (install.install_base != install.install_platbase)): + if self.distribution.has_ext_modules() and ( + install.install_base != install.install_platbase + ): raise DistutilsPlatformError( - "can't make a dumb built distribution where " - "base and platbase are different (%s, %s)" - % (repr(install.install_base), - repr(install.install_platbase))) + "can't make a dumb built distribution where " + "base and platbase are different (%s, %s)" + % (repr(install.install_base), repr(install.install_platbase)) + ) else: - archive_root = os.path.join(self.bdist_dir, - ensure_relative(install.install_base)) + archive_root = os.path.join( + self.bdist_dir, ensure_relative(install.install_base) + ) # Make the archive - filename = self.make_archive(pseudoinstall_root, - self.format, root_dir=archive_root, - owner=self.owner, group=self.group) + filename = self.make_archive( + pseudoinstall_root, + self.format, + root_dir=archive_root, + owner=self.owner, + group=self.group, + ) if self.distribution.has_ext_modules(): pyversion = get_python_version() else: pyversion = 'any' - self.distribution.dist_files.append(('bdist_dumb', pyversion, - filename)) + self.distribution.dist_files.append(('bdist_dumb', pyversion, filename)) if not self.keep_temp: remove_tree(self.bdist_dir, dry_run=self.dry_run) diff --git a/setuptools/_distutils/command/bdist_msi.py b/setuptools/_distutils/command/bdist_msi.py index 56c4b98..2f292c9 100644 --- a/setuptools/_distutils/command/bdist_msi.py +++ b/setuptools/_distutils/command/bdist_msi.py @@ -20,17 +20,19 @@ import msilib from msilib import schema, sequence, text from msilib import Directory, Feature, Dialog, add_data + class PyDialog(Dialog): """Dialog class with a fixed layout: controls at the top, then a ruler, then a list of buttons: back, next, cancel. Optionally a bitmap at the left.""" + def __init__(self, *args, **kw): """Dialog(database, name, x, y, w, h, attributes, title, first, default, cancel, bitmap=true)""" super().__init__(*args) ruler = self.h - 36 - bmwidth = 152*ruler/328 - #if kw.get("bitmap", True): + bmwidth = 152 * ruler / 328 + # if kw.get("bitmap", True): # self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin") self.line("BottomLine", 0, ruler, self.w, 0) @@ -38,41 +40,40 @@ class PyDialog(Dialog): "Set the title text of the dialog at the top." # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix, # text, in VerdanaBold10 - self.text("Title", 15, 10, 320, 60, 0x30003, - r"{\VerdanaBold10}%s" % title) + self.text("Title", 15, 10, 320, 60, 0x30003, r"{\VerdanaBold10}%s" % title) - def back(self, title, next, name = "Back", active = 1): + def back(self, title, next, name="Back", active=1): """Add a back button with a given title, the tab-next button, its name in the Control table, possibly initially disabled. 
Return the button, so that events can be associated""" if active: - flags = 3 # Visible|Enabled + flags = 3 # Visible|Enabled else: - flags = 1 # Visible - return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next) + flags = 1 # Visible + return self.pushbutton(name, 180, self.h - 27, 56, 17, flags, title, next) - def cancel(self, title, next, name = "Cancel", active = 1): + def cancel(self, title, next, name="Cancel", active=1): """Add a cancel button with a given title, the tab-next button, its name in the Control table, possibly initially disabled. Return the button, so that events can be associated""" if active: - flags = 3 # Visible|Enabled + flags = 3 # Visible|Enabled else: - flags = 1 # Visible - return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next) + flags = 1 # Visible + return self.pushbutton(name, 304, self.h - 27, 56, 17, flags, title, next) - def next(self, title, next, name = "Next", active = 1): + def next(self, title, next, name="Next", active=1): """Add a Next button with a given title, the tab-next button, its name in the Control table, possibly initially disabled. Return the button, so that events can be associated""" if active: - flags = 3 # Visible|Enabled + flags = 3 # Visible|Enabled else: - flags = 1 # Visible - return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next) + flags = 1 # Visible + return self.pushbutton(name, 236, self.h - 27, 56, 17, flags, title, next) def xbutton(self, name, title, next, xpos): """Add a button with a given title, the tab-next button, @@ -80,55 +81,96 @@ class PyDialog(Dialog): y-position is aligned with the other buttons. Return the button, so that events can be associated""" - return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next) + return self.pushbutton( + name, int(self.w * xpos - 28), self.h - 27, 56, 17, 3, title, next + ) + class bdist_msi(Command): description = "create a Microsoft Installer (.msi) binary distribution" - user_options = [('bdist-dir=', None, - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('target-version=', None, - "require a specific python version" + - " on the target system"), - ('no-target-compile', 'c', - "do not compile .py to .pyc on the target system"), - ('no-target-optimize', 'o', - "do not compile .py to .pyo (optimized) " - "on the target system"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('install-script=', None, - "basename of installation script to be run after " - "installation or before deinstallation"), - ('pre-install-script=', None, - "Fully qualified filename of a script to be run before " - "any files are installed. 
This script need not be in the " - "distribution"), - ] - - boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', - 'skip-build'] - - all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4', - '2.5', '2.6', '2.7', '2.8', '2.9', - '3.0', '3.1', '3.2', '3.3', '3.4', - '3.5', '3.6', '3.7', '3.8', '3.9'] + user_options = [ + ('bdist-dir=', None, "temporary directory for creating the distribution"), + ( + 'plat-name=', + 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform(), + ), + ( + 'keep-temp', + 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive", + ), + ( + 'target-version=', + None, + "require a specific python version" + " on the target system", + ), + ('no-target-compile', 'c', "do not compile .py to .pyc on the target system"), + ( + 'no-target-optimize', + 'o', + "do not compile .py to .pyo (optimized) " "on the target system", + ), + ('dist-dir=', 'd', "directory to put final built distributions in"), + ('skip-build', None, "skip rebuilding everything (for testing/debugging)"), + ( + 'install-script=', + None, + "basename of installation script to be run after " + "installation or before deinstallation", + ), + ( + 'pre-install-script=', + None, + "Fully qualified filename of a script to be run before " + "any files are installed. This script need not be in the " + "distribution", + ), + ] + + boolean_options = [ + 'keep-temp', + 'no-target-compile', + 'no-target-optimize', + 'skip-build', + ] + + all_versions = [ + '2.0', + '2.1', + '2.2', + '2.3', + '2.4', + '2.5', + '2.6', + '2.7', + '2.8', + '2.9', + '3.0', + '3.1', + '3.2', + '3.3', + '3.4', + '3.5', + '3.6', + '3.7', + '3.8', + '3.9', + ] other_version = 'X' def __init__(self, *args, **kw): super().__init__(*args, **kw) - warnings.warn("bdist_msi command is deprecated since Python 3.9, " - "use bdist_wheel (wheel packages) instead", - DeprecationWarning, 2) + warnings.warn( + "bdist_msi command is deprecated since Python 3.9, " + "use bdist_wheel (wheel packages) instead", + DeprecationWarning, + 2, + ) def initialize_options(self): self.bdist_dir = None @@ -156,22 +198,28 @@ class bdist_msi(Command): if self.target_version: self.versions = [self.target_version] - if not self.skip_build and self.distribution.has_ext_modules()\ - and self.target_version != short_version: + if ( + not self.skip_build + and self.distribution.has_ext_modules() + and self.target_version != short_version + ): raise DistutilsOptionError( - "target version can only be %s, or the '--skip-build'" - " option must be specified" % (short_version,)) + "target version can only be %s, or the '--skip-build'" + " option must be specified" % (short_version,) + ) else: self.versions = list(self.all_versions) - self.set_undefined_options('bdist', - ('dist_dir', 'dist_dir'), - ('plat_name', 'plat_name'), - ) + self.set_undefined_options( + 'bdist', + ('dist_dir', 'dist_dir'), + ('plat_name', 'plat_name'), + ) if self.pre_install_script: raise DistutilsOptionError( - "the pre-install-script feature is not yet implemented") + "the pre-install-script feature is not yet implemented" + ) if self.install_script: for script in self.distribution.scripts: @@ -179,8 +227,8 @@ class bdist_msi(Command): break else: raise DistutilsOptionError( - "install_script '%s' not found in scripts" - % self.install_script) + "install_script '%s' not found in scripts" % self.install_script + ) self.install_script_key = None def run(self): @@ -210,8 +258,7 @@ class bdist_msi(Command): 
target_version = '%d.%d' % sys.version_info[:2] plat_specifier = ".%s-%s" % (self.plat_name, target_version) build = self.get_finalized_command('build') - build.build_lib = os.path.join(build.build_base, - 'lib' + plat_specifier) + build.build_lib = os.path.join(build.build_base, 'lib' + plat_specifier) log.info("installing to %s", self.bdist_dir) install.ensure_finalized() @@ -228,7 +275,8 @@ class bdist_msi(Command): fullname = self.distribution.get_fullname() installer_name = self.get_installer_filename(fullname) installer_name = os.path.abspath(installer_name) - if os.path.exists(installer_name): os.unlink(installer_name) + if os.path.exists(installer_name): + os.unlink(installer_name) metadata = self.distribution.metadata author = metadata.author or metadata.maintainer @@ -244,9 +292,9 @@ class bdist_msi(Command): product_name = "Python %s %s" % (self.target_version, fullname) else: product_name = "Python %s" % (fullname) - self.db = msilib.init_database(installer_name, schema, - product_name, msilib.gen_uuid(), - sversion, author) + self.db = msilib.init_database( + installer_name, schema, product_name, msilib.gen_uuid(), sversion, author + ) msilib.add_tables(self.db, sequence) props = [('DistVersion', version)] email = metadata.author_email or metadata.maintainer_email @@ -276,8 +324,7 @@ class bdist_msi(Command): rootdir = os.path.abspath(self.bdist_dir) root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir") - f = Feature(db, "Python", "Python", "Everything", - 0, 1, directory="TARGETDIR") + f = Feature(db, "Python", "Python", "Everything", 0, 1, directory="TARGETDIR") items = [(f, root, '')] for version in self.versions + [self.other_version]: @@ -312,15 +359,27 @@ class bdist_msi(Command): dir.start_component(dir.logical, feature, 0) if afile not in seen: key = seen[afile] = dir.add_file(file) - if file==self.install_script: + if file == self.install_script: if self.install_script_key: raise DistutilsOptionError( - "Multiple files with name %s" % file) + "Multiple files with name %s" % file + ) self.install_script_key = '[#%s]' % key else: key = seen[afile] - add_data(self.db, "DuplicateFile", - [(key + version, dir.component, key, None, dir.logical)]) + add_data( + self.db, + "DuplicateFile", + [ + ( + key + version, + dir.component, + key, + None, + dir.logical, + ) + ], + ) db.Commit() cab.commit(db) @@ -349,32 +408,60 @@ class bdist_msi(Command): exe_prop = "PYTHON" + ver if msilib.Win64: # type: msidbLocatorTypeRawValue + msidbLocatorType64bit - Type = 2+16 + Type = 2 + 16 else: Type = 2 - add_data(self.db, "RegLocator", - [(machine_reg, 2, install_path, None, Type), - (user_reg, 1, install_path, None, Type)]) - add_data(self.db, "AppSearch", - [(machine_prop, machine_reg), - (user_prop, user_reg)]) - add_data(self.db, "CustomAction", - [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"), - (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"), - (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"), - ]) - add_data(self.db, "InstallExecuteSequence", - [(machine_action, machine_prop, start), - (user_action, user_prop, start + 1), - (exe_action, None, start + 2), - ]) - add_data(self.db, "InstallUISequence", - [(machine_action, machine_prop, start), - (user_action, user_prop, start + 1), - (exe_action, None, start + 2), - ]) - add_data(self.db, "Condition", - [("Python" + ver, 0, "NOT TARGETDIR" + ver)]) + add_data( + self.db, + "RegLocator", + [ + (machine_reg, 2, install_path, None, Type), + (user_reg, 1, install_path, 
None, Type), + ], + ) + add_data( + self.db, + "AppSearch", + [(machine_prop, machine_reg), (user_prop, user_reg)], + ) + add_data( + self.db, + "CustomAction", + [ + ( + machine_action, + 51 + 256, + target_dir_prop, + "[" + machine_prop + "]", + ), + (user_action, 51 + 256, target_dir_prop, "[" + user_prop + "]"), + ( + exe_action, + 51 + 256, + exe_prop, + "[" + target_dir_prop + "]\\python.exe", + ), + ], + ) + add_data( + self.db, + "InstallExecuteSequence", + [ + (machine_action, machine_prop, start), + (user_action, user_prop, start + 1), + (exe_action, None, start + 2), + ], + ) + add_data( + self.db, + "InstallUISequence", + [ + (machine_action, machine_prop, start), + (user_action, user_prop, start + 1), + (exe_action, None, start + 2), + ], + ) + add_data(self.db, "Condition", [("Python" + ver, 0, "NOT TARGETDIR" + ver)]) start += 4 assert start < 500 @@ -384,10 +471,16 @@ class bdist_msi(Command): for ver in self.versions + [self.other_version]: install_action = "install_script." + ver exe_prop = "PYTHON" + ver - add_data(self.db, "CustomAction", - [(install_action, 50, exe_prop, self.install_script_key)]) - add_data(self.db, "InstallExecuteSequence", - [(install_action, "&Python%s=3" % ver, start)]) + add_data( + self.db, + "CustomAction", + [(install_action, 50, exe_prop, self.install_script_key)], + ) + add_data( + self.db, + "InstallExecuteSequence", + [(install_action, "&Python%s=3" % ver, start)], + ) start += 1 # XXX pre-install scripts are currently refused in finalize_options() # but if this feature is completed, it will also need to add @@ -406,15 +499,13 @@ class bdist_msi(Command): f.write('rem ="""\n%1 %0\nexit\n"""\n') with open(self.pre_install_script) as fin: f.write(fin.read()) - add_data(self.db, "Binary", - [("PreInstall", msilib.Binary(scriptfn)) - ]) - add_data(self.db, "CustomAction", - [("PreInstall", 2, "PreInstall", None) - ]) - add_data(self.db, "InstallExecuteSequence", - [("PreInstall", "NOT Installed", 450)]) - + add_data(self.db, "Binary", [("PreInstall", msilib.Binary(scriptfn))]) + add_data(self.db, "CustomAction", [("PreInstall", 2, "PreInstall", None)]) + add_data( + self.db, + "InstallExecuteSequence", + [("PreInstall", "NOT Installed", 450)], + ) def add_ui(self): db = self.db @@ -424,168 +515,322 @@ class bdist_msi(Command): title = "[ProductName] Setup" # see "Dialog Style Bits" - modal = 3 # visible | modal - modeless = 1 # visible + modal = 3 # visible | modal + modeless = 1 # visible track_disk_space = 32 # UI customization properties - add_data(db, "Property", - # See "DefaultUIFont Property" - [("DefaultUIFont", "DlgFont8"), - # See "ErrorDialog Style Bit" - ("ErrorDialog", "ErrorDlg"), - ("Progress1", "Install"), # modified in maintenance type dlg - ("Progress2", "installs"), - ("MaintenanceForm_Action", "Repair"), - # possible values: ALL, JUSTME - ("WhichUsers", "ALL") - ]) + add_data( + db, + "Property", + # See "DefaultUIFont Property" + [ + ("DefaultUIFont", "DlgFont8"), + # See "ErrorDialog Style Bit" + ("ErrorDialog", "ErrorDlg"), + ("Progress1", "Install"), # modified in maintenance type dlg + ("Progress2", "installs"), + ("MaintenanceForm_Action", "Repair"), + # possible values: ALL, JUSTME + ("WhichUsers", "ALL"), + ], + ) # Fonts, see "TextStyle Table" - add_data(db, "TextStyle", - [("DlgFont8", "Tahoma", 9, None, 0), - ("DlgFontBold8", "Tahoma", 8, None, 1), #bold - ("VerdanaBold10", "Verdana", 10, None, 1), - ("VerdanaRed9", "Verdana", 9, 255, 0), - ]) + add_data( + db, + "TextStyle", + [ + ("DlgFont8", "Tahoma", 9, None, 
0), + ("DlgFontBold8", "Tahoma", 8, None, 1), # bold + ("VerdanaBold10", "Verdana", 10, None, 1), + ("VerdanaRed9", "Verdana", 9, 255, 0), + ], + ) # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table" # Numbers indicate sequence; see sequence.py for how these action integrate - add_data(db, "InstallUISequence", - [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140), - ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141), - # In the user interface, assume all-users installation if privileged. - ("SelectFeaturesDlg", "Not Installed", 1230), - # XXX no support for resume installations yet - #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240), - ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250), - ("ProgressDlg", None, 1280)]) + add_data( + db, + "InstallUISequence", + [ + ("PrepareDlg", "Not Privileged or Windows9x or Installed", 140), + ( + "WhichUsersDlg", + "Privileged and not Windows9x and not Installed", + 141, + ), + # In the user interface, assume all-users installation if privileged. + ("SelectFeaturesDlg", "Not Installed", 1230), + # XXX no support for resume installations yet + # ("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240), + ( + "MaintenanceTypeDlg", + "Installed AND NOT RESUME AND NOT Preselected", + 1250, + ), + ("ProgressDlg", None, 1280), + ], + ) add_data(db, 'ActionText', text.ActionText) add_data(db, 'UIText', text.UIText) ##################################################################### # Standard dialogs: FatalError, UserExit, ExitDialog - fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title, - "Finish", "Finish", "Finish") + fatal = PyDialog( + db, "FatalError", x, y, w, h, modal, title, "Finish", "Finish", "Finish" + ) fatal.title("[ProductName] Installer ended prematurely") - fatal.back("< Back", "Finish", active = 0) - fatal.cancel("Cancel", "Back", active = 0) - fatal.text("Description1", 15, 70, 320, 80, 0x30003, - "[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.") - fatal.text("Description2", 15, 155, 320, 20, 0x30003, - "Click the Finish button to exit the Installer.") - c=fatal.next("Finish", "Cancel", name="Finish") + fatal.back("< Back", "Finish", active=0) + fatal.cancel("Cancel", "Back", active=0) + fatal.text( + "Description1", + 15, + 70, + 320, + 80, + 0x30003, + "[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.", + ) + fatal.text( + "Description2", + 15, + 155, + 320, + 20, + 0x30003, + "Click the Finish button to exit the Installer.", + ) + c = fatal.next("Finish", "Cancel", name="Finish") c.event("EndDialog", "Exit") - user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title, - "Finish", "Finish", "Finish") + user_exit = PyDialog( + db, "UserExit", x, y, w, h, modal, title, "Finish", "Finish", "Finish" + ) user_exit.title("[ProductName] Installer was interrupted") - user_exit.back("< Back", "Finish", active = 0) - user_exit.cancel("Cancel", "Back", active = 0) - user_exit.text("Description1", 15, 70, 320, 80, 0x30003, - "[ProductName] setup was interrupted. Your system has not been modified. 
" - "To install this program at a later time, please run the installation again.") - user_exit.text("Description2", 15, 155, 320, 20, 0x30003, - "Click the Finish button to exit the Installer.") + user_exit.back("< Back", "Finish", active=0) + user_exit.cancel("Cancel", "Back", active=0) + user_exit.text( + "Description1", + 15, + 70, + 320, + 80, + 0x30003, + "[ProductName] setup was interrupted. Your system has not been modified. " + "To install this program at a later time, please run the installation again.", + ) + user_exit.text( + "Description2", + 15, + 155, + 320, + 20, + 0x30003, + "Click the Finish button to exit the Installer.", + ) c = user_exit.next("Finish", "Cancel", name="Finish") c.event("EndDialog", "Exit") - exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title, - "Finish", "Finish", "Finish") + exit_dialog = PyDialog( + db, "ExitDialog", x, y, w, h, modal, title, "Finish", "Finish", "Finish" + ) exit_dialog.title("Completing the [ProductName] Installer") - exit_dialog.back("< Back", "Finish", active = 0) - exit_dialog.cancel("Cancel", "Back", active = 0) - exit_dialog.text("Description", 15, 235, 320, 20, 0x30003, - "Click the Finish button to exit the Installer.") + exit_dialog.back("< Back", "Finish", active=0) + exit_dialog.cancel("Cancel", "Back", active=0) + exit_dialog.text( + "Description", + 15, + 235, + 320, + 20, + 0x30003, + "Click the Finish button to exit the Installer.", + ) c = exit_dialog.next("Finish", "Cancel", name="Finish") c.event("EndDialog", "Return") ##################################################################### # Required dialog: FilesInUse, ErrorDlg - inuse = PyDialog(db, "FilesInUse", - x, y, w, h, - 19, # KeepModeless|Modal|Visible - title, - "Retry", "Retry", "Retry", bitmap=False) - inuse.text("Title", 15, 6, 200, 15, 0x30003, - r"{\DlgFontBold8}Files in Use") - inuse.text("Description", 20, 23, 280, 20, 0x30003, - "Some files that need to be updated are currently in use.") - inuse.text("Text", 20, 55, 330, 50, 3, - "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.") - inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess", - None, None, None) - c=inuse.back("Exit", "Ignore", name="Exit") + inuse = PyDialog( + db, + "FilesInUse", + x, + y, + w, + h, + 19, # KeepModeless|Modal|Visible + title, + "Retry", + "Retry", + "Retry", + bitmap=False, + ) + inuse.text("Title", 15, 6, 200, 15, 0x30003, r"{\DlgFontBold8}Files in Use") + inuse.text( + "Description", + 20, + 23, + 280, + 20, + 0x30003, + "Some files that need to be updated are currently in use.", + ) + inuse.text( + "Text", + 20, + 55, + 330, + 50, + 3, + "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.", + ) + inuse.control( + "List", + "ListBox", + 20, + 107, + 330, + 130, + 7, + "FileInUseProcess", + None, + None, + None, + ) + c = inuse.back("Exit", "Ignore", name="Exit") c.event("EndDialog", "Exit") - c=inuse.next("Ignore", "Retry", name="Ignore") + c = inuse.next("Ignore", "Retry", name="Ignore") c.event("EndDialog", "Ignore") - c=inuse.cancel("Retry", "Exit", name="Retry") - c.event("EndDialog","Retry") + c = inuse.cancel("Retry", "Exit", name="Retry") + c.event("EndDialog", "Retry") # See "Error Dialog". See "ICE20" for the required names of the controls. 
- error = Dialog(db, "ErrorDlg", - 50, 10, 330, 101, - 65543, # Error|Minimize|Modal|Visible - title, - "ErrorText", None, None) - error.text("ErrorText", 50,9,280,48,3, "") - #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None) - error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo") - error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes") - error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort") - error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel") - error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore") - error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk") - error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry") + error = Dialog( + db, + "ErrorDlg", + 50, + 10, + 330, + 101, + 65543, # Error|Minimize|Modal|Visible + title, + "ErrorText", + None, + None, + ) + error.text("ErrorText", 50, 9, 280, 48, 3, "") + # error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None) + error.pushbutton("N", 120, 72, 81, 21, 3, "No", None).event( + "EndDialog", "ErrorNo" + ) + error.pushbutton("Y", 240, 72, 81, 21, 3, "Yes", None).event( + "EndDialog", "ErrorYes" + ) + error.pushbutton("A", 0, 72, 81, 21, 3, "Abort", None).event( + "EndDialog", "ErrorAbort" + ) + error.pushbutton("C", 42, 72, 81, 21, 3, "Cancel", None).event( + "EndDialog", "ErrorCancel" + ) + error.pushbutton("I", 81, 72, 81, 21, 3, "Ignore", None).event( + "EndDialog", "ErrorIgnore" + ) + error.pushbutton("O", 159, 72, 81, 21, 3, "Ok", None).event( + "EndDialog", "ErrorOk" + ) + error.pushbutton("R", 198, 72, 81, 21, 3, "Retry", None).event( + "EndDialog", "ErrorRetry" + ) ##################################################################### # Global "Query Cancel" dialog - cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title, - "No", "No", "No") - cancel.text("Text", 48, 15, 194, 30, 3, - "Are you sure you want to cancel [ProductName] installation?") - #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None, + cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title, "No", "No", "No") + cancel.text( + "Text", + 48, + 15, + 194, + 30, + 3, + "Are you sure you want to cancel [ProductName] installation?", + ) + # cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None, # "py.ico", None, None) - c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No") + c = cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No") c.event("EndDialog", "Exit") - c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes") + c = cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes") c.event("EndDialog", "Return") ##################################################################### # Global "Wait for costing" dialog - costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title, - "Return", "Return", "Return") - costing.text("Text", 48, 15, 194, 30, 3, - "Please wait while the installer finishes determining your disk space requirements.") + costing = Dialog( + db, + "WaitForCostingDlg", + 50, + 10, + 260, + 85, + modal, + title, + "Return", + "Return", + "Return", + ) + costing.text( + "Text", + 48, + 15, + 194, + 30, + 3, + "Please wait while the installer finishes determining your disk space requirements.", + ) c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None) c.event("EndDialog", "Exit") ##################################################################### # 
Preparation dialog: no user input except cancellation - prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title, - "Cancel", "Cancel", "Cancel") - prep.text("Description", 15, 70, 320, 40, 0x30003, - "Please wait while the Installer prepares to guide you through the installation.") + prep = PyDialog( + db, "PrepareDlg", x, y, w, h, modeless, title, "Cancel", "Cancel", "Cancel" + ) + prep.text( + "Description", + 15, + 70, + 320, + 40, + 0x30003, + "Please wait while the Installer prepares to guide you through the installation.", + ) prep.title("Welcome to the [ProductName] Installer") - c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...") + c = prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...") c.mapping("ActionText", "Text") - c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None) + c = prep.text("ActionData", 15, 135, 320, 30, 0x30003, None) c.mapping("ActionData", "Text") prep.back("Back", None, active=0) prep.next("Next", None, active=0) - c=prep.cancel("Cancel", None) + c = prep.cancel("Cancel", None) c.event("SpawnDialog", "CancelDlg") ##################################################################### # Feature (Python directory) selection - seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title, - "Next", "Next", "Cancel") + seldlg = PyDialog( + db, "SelectFeaturesDlg", x, y, w, h, modal, title, "Next", "Next", "Cancel" + ) seldlg.title("Select Python Installations") - seldlg.text("Hint", 15, 30, 300, 20, 3, - "Select the Python locations where %s should be installed." - % self.distribution.get_fullname()) + seldlg.text( + "Hint", + 15, + 30, + 300, + 20, + 3, + "Select the Python locations where %s should be installed." + % self.distribution.get_fullname(), + ) seldlg.back("< Back", None, active=0) c = seldlg.next("Next >", "Cancel") @@ -593,30 +838,56 @@ class bdist_msi(Command): c.event("[TARGETDIR]", "[SourceDir]", ordering=order) for version in self.versions + [self.other_version]: order += 1 - c.event("[TARGETDIR]", "[TARGETDIR%s]" % version, - "FEATURE_SELECTED AND &Python%s=3" % version, - ordering=order) + c.event( + "[TARGETDIR]", + "[TARGETDIR%s]" % version, + "FEATURE_SELECTED AND &Python%s=3" % version, + ordering=order, + ) c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1) c.event("EndDialog", "Return", ordering=order + 2) c = seldlg.cancel("Cancel", "Features") c.event("SpawnDialog", "CancelDlg") - c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3, - "FEATURE", None, "PathEdit", None) + c = seldlg.control( + "Features", + "SelectionTree", + 15, + 60, + 300, + 120, + 3, + "FEATURE", + None, + "PathEdit", + None, + ) c.event("[FEATURE_SELECTED]", "1") ver = self.other_version install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver - c = seldlg.text("Other", 15, 200, 300, 15, 3, - "Provide an alternate Python location") + c = seldlg.text( + "Other", 15, 200, 300, 15, 3, "Provide an alternate Python location" + ) c.condition("Enable", install_other_cond) c.condition("Show", install_other_cond) c.condition("Disable", dont_install_other_cond) c.condition("Hide", dont_install_other_cond) - c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1, - "TARGETDIR" + ver, None, "Next", None) + c = seldlg.control( + "PathEdit", + "PathEdit", + 15, + 215, + 300, + 16, + 1, + "TARGETDIR" + ver, + None, + "Next", + None, + ) c.condition("Enable", install_other_cond) c.condition("Show", install_other_cond) 
c.condition("Disable", dont_install_other_cond) @@ -624,20 +895,47 @@ class bdist_msi(Command): ##################################################################### # Disk cost - cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title, - "OK", "OK", "OK", bitmap=False) - cost.text("Title", 15, 6, 200, 15, 0x30003, - r"{\DlgFontBold8}Disk Space Requirements") - cost.text("Description", 20, 20, 280, 20, 0x30003, - "The disk space required for the installation of the selected features.") - cost.text("Text", 20, 53, 330, 60, 3, - "The highlighted volumes (if any) do not have enough disk space " - "available for the currently selected features. You can either " - "remove some files from the highlighted volumes, or choose to " - "install less features onto local drive(s), or select different " - "destination drive(s).") - cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223, - None, "{120}{70}{70}{70}{70}", None, None) + cost = PyDialog( + db, "DiskCostDlg", x, y, w, h, modal, title, "OK", "OK", "OK", bitmap=False + ) + cost.text( + "Title", 15, 6, 200, 15, 0x30003, r"{\DlgFontBold8}Disk Space Requirements" + ) + cost.text( + "Description", + 20, + 20, + 280, + 20, + 0x30003, + "The disk space required for the installation of the selected features.", + ) + cost.text( + "Text", + 20, + 53, + 330, + 60, + 3, + "The highlighted volumes (if any) do not have enough disk space " + "available for the currently selected features. You can either " + "remove some files from the highlighted volumes, or choose to " + "install less features onto local drive(s), or select different " + "destination drive(s).", + ) + cost.control( + "VolumeList", + "VolumeCostList", + 20, + 100, + 330, + 150, + 393223, + None, + "{120}{70}{70}{70}{70}", + None, + None, + ) cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return") ##################################################################### @@ -651,12 +949,26 @@ class bdist_msi(Command): # On Windows9x, the ALLUSERS property is ignored on the command line # and in the Property table, but installer fails according to the documentation # if a dialog attempts to set ALLUSERS. - whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title, - "AdminInstall", "Next", "Cancel") - whichusers.title("Select whether to install [ProductName] for all users of this computer.") + whichusers = PyDialog( + db, + "WhichUsersDlg", + x, + y, + w, + h, + modal, + title, + "AdminInstall", + "Next", + "Cancel", + ) + whichusers.title( + "Select whether to install [ProductName] for all users of this computer." 
+ ) # A radio group with two options: allusers, justme - g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3, - "WhichUsers", "", "Next") + g = whichusers.radiogroup( + "AdminInstall", 15, 60, 260, 50, 3, "WhichUsers", "", "Next" + ) g.add("ALL", 0, 5, 150, 20, "Install for all users") g.add("JUSTME", 0, 25, 150, 20, "Install just for me") @@ -664,30 +976,67 @@ class bdist_msi(Command): c = whichusers.next("Next >", "Cancel") c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1) - c.event("EndDialog", "Return", ordering = 2) + c.event("EndDialog", "Return", ordering=2) c = whichusers.cancel("Cancel", "AdminInstall") c.event("SpawnDialog", "CancelDlg") ##################################################################### # Installation Progress dialog (modeless) - progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title, - "Cancel", "Cancel", "Cancel", bitmap=False) - progress.text("Title", 20, 15, 200, 15, 0x30003, - r"{\DlgFontBold8}[Progress1] [ProductName]") - progress.text("Text", 35, 65, 300, 30, 3, - "Please wait while the Installer [Progress2] [ProductName]. " - "This may take several minutes.") + progress = PyDialog( + db, + "ProgressDlg", + x, + y, + w, + h, + modeless, + title, + "Cancel", + "Cancel", + "Cancel", + bitmap=False, + ) + progress.text( + "Title", + 20, + 15, + 200, + 15, + 0x30003, + r"{\DlgFontBold8}[Progress1] [ProductName]", + ) + progress.text( + "Text", + 35, + 65, + 300, + 30, + 3, + "Please wait while the Installer [Progress2] [ProductName]. " + "This may take several minutes.", + ) progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:") - c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...") + c = progress.text("ActionText", 70, 100, w - 70, 20, 3, "Pondering...") c.mapping("ActionText", "Text") - #c=progress.text("ActionData", 35, 140, 300, 20, 3, None) - #c.mapping("ActionData", "Text") - - c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537, - None, "Progress done", None, None) + # c=progress.text("ActionData", 35, 140, 300, 20, 3, None) + # c.mapping("ActionData", "Text") + + c = progress.control( + "ProgressBar", + "ProgressBar", + 35, + 120, + 300, + 10, + 65537, + None, + "Progress done", + None, + None, + ) c.mapping("SetProgress", "Progress") progress.back("< Back", "Next", active=False) @@ -696,23 +1045,40 @@ class bdist_msi(Command): ################################################################### # Maintenance type: repair/uninstall - maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title, - "Next", "Next", "Cancel") + maint = PyDialog( + db, "MaintenanceTypeDlg", x, y, w, h, modal, title, "Next", "Next", "Cancel" + ) maint.title("Welcome to the [ProductName] Setup Wizard") - maint.text("BodyText", 15, 63, 330, 42, 3, - "Select whether you want to repair or remove [ProductName].") - g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3, - "MaintenanceForm_Action", "", "Next") - #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]") + maint.text( + "BodyText", + 15, + 63, + 330, + 42, + 3, + "Select whether you want to repair or remove [ProductName].", + ) + g = maint.radiogroup( + "RepairRadioGroup", + 15, + 108, + 330, + 60, + 3, + "MaintenanceForm_Action", + "", + "Next", + ) + # g.add("Change", 0, 0, 200, 17, "&Change [ProductName]") g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]") g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]") maint.back("< Back", None, active=False) - c=maint.next("Finish", "Cancel") + c = maint.next("Finish", "Cancel") # Change 
installation: Change progress dialog to "Change", then ask # for feature selection - #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1) - #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2) + # c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1) + # c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2) # Reinstall: Change progress dialog to "Repair", then invoke reinstall # Also set list of reinstalled features to "ALL" @@ -730,15 +1096,18 @@ class bdist_msi(Command): # Close dialog when maintenance action scheduled c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20) - #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21) + # c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21) maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg") def get_installer_filename(self, fullname): # Factored out to allow overriding in subclasses if self.target_version: - base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name, - self.target_version) + base_name = "%s.%s-py%s.msi" % ( + fullname, + self.plat_name, + self.target_version, + ) else: base_name = "%s.%s.msi" % (fullname, self.plat_name) installer_name = os.path.join(self.dist_dir, base_name) diff --git a/setuptools/_distutils/command/bdist_rpm.py b/setuptools/_distutils/command/bdist_rpm.py index a2a9e8e..cf4b952 100644 --- a/setuptools/_distutils/command/bdist_rpm.py +++ b/setuptools/_distutils/command/bdist_rpm.py @@ -11,126 +11,137 @@ from distutils.errors import * from distutils.sysconfig import get_python_version from distutils import log + class bdist_rpm(Command): description = "create an RPM distribution" user_options = [ - ('bdist-base=', None, - "base directory for creating built distributions"), - ('rpm-base=', None, - "base directory for creating RPMs (defaults to \"rpm\" under " - "--bdist-base; must be specified for RPM 2)"), - ('dist-dir=', 'd', - "directory to put final RPM files in " - "(and .spec files if --spec-only)"), - ('python=', None, - "path to Python interpreter to hard-code in the .spec file " - "(default: \"python\")"), - ('fix-python', None, - "hard-code the exact path to the current Python interpreter in " - "the .spec file"), - ('spec-only', None, - "only regenerate spec file"), - ('source-only', None, - "only generate source RPM"), - ('binary-only', None, - "only generate binary RPM"), - ('use-bzip2', None, - "use bzip2 instead of gzip to create source distribution"), - + ('bdist-base=', None, "base directory for creating built distributions"), + ( + 'rpm-base=', + None, + "base directory for creating RPMs (defaults to \"rpm\" under " + "--bdist-base; must be specified for RPM 2)", + ), + ( + 'dist-dir=', + 'd', + "directory to put final RPM files in " "(and .spec files if --spec-only)", + ), + ( + 'python=', + None, + "path to Python interpreter to hard-code in the .spec file " + "(default: \"python\")", + ), + ( + 'fix-python', + None, + "hard-code the exact path to the current Python interpreter in " + "the .spec file", + ), + ('spec-only', None, "only regenerate spec file"), + ('source-only', None, "only generate source RPM"), + ('binary-only', None, "only generate binary RPM"), + ('use-bzip2', None, "use bzip2 instead of gzip to create source distribution"), # More meta-data: too RPM-specific to put in the setup script, # but needs to go in the .spec file -- so we make these options # to "bdist_rpm". 
The idea is that packagers would put this # info in setup.cfg, although they are of course free to # supply it on the command line. - ('distribution-name=', None, - "name of the (Linux) distribution to which this " - "RPM applies (*not* the name of the module distribution!)"), - ('group=', None, - "package classification [default: \"Development/Libraries\"]"), - ('release=', None, - "RPM release number"), - ('serial=', None, - "RPM serial number"), - ('vendor=', None, - "RPM \"vendor\" (eg. \"Joe Blow \") " - "[default: maintainer or author from setup script]"), - ('packager=', None, - "RPM packager (eg. \"Jane Doe \") " - "[default: vendor]"), - ('doc-files=', None, - "list of documentation files (space or comma-separated)"), - ('changelog=', None, - "RPM changelog"), - ('icon=', None, - "name of icon file"), - ('provides=', None, - "capabilities provided by this package"), - ('requires=', None, - "capabilities required by this package"), - ('conflicts=', None, - "capabilities which conflict with this package"), - ('build-requires=', None, - "capabilities required to build this package"), - ('obsoletes=', None, - "capabilities made obsolete by this package"), - ('no-autoreq', None, - "do not automatically calculate dependencies"), - + ( + 'distribution-name=', + None, + "name of the (Linux) distribution to which this " + "RPM applies (*not* the name of the module distribution!)", + ), + ('group=', None, "package classification [default: \"Development/Libraries\"]"), + ('release=', None, "RPM release number"), + ('serial=', None, "RPM serial number"), + ( + 'vendor=', + None, + "RPM \"vendor\" (eg. \"Joe Blow \") " + "[default: maintainer or author from setup script]", + ), + ( + 'packager=', + None, + "RPM packager (eg. \"Jane Doe \") " "[default: vendor]", + ), + ('doc-files=', None, "list of documentation files (space or comma-separated)"), + ('changelog=', None, "RPM changelog"), + ('icon=', None, "name of icon file"), + ('provides=', None, "capabilities provided by this package"), + ('requires=', None, "capabilities required by this package"), + ('conflicts=', None, "capabilities which conflict with this package"), + ('build-requires=', None, "capabilities required to build this package"), + ('obsoletes=', None, "capabilities made obsolete by this package"), + ('no-autoreq', None, "do not automatically calculate dependencies"), # Actions to take when building RPM - ('keep-temp', 'k', - "don't clean up RPM build directory"), - ('no-keep-temp', None, - "clean up RPM build directory [default]"), - ('use-rpm-opt-flags', None, - "compile with RPM_OPT_FLAGS when building from source RPM"), - ('no-rpm-opt-flags', None, - "do not pass any RPM CFLAGS to compiler"), - ('rpm3-mode', None, - "RPM 3 compatibility mode (default)"), - ('rpm2-mode', None, - "RPM 2 compatibility mode"), - + ('keep-temp', 'k', "don't clean up RPM build directory"), + ('no-keep-temp', None, "clean up RPM build directory [default]"), + ( + 'use-rpm-opt-flags', + None, + "compile with RPM_OPT_FLAGS when building from source RPM", + ), + ('no-rpm-opt-flags', None, "do not pass any RPM CFLAGS to compiler"), + ('rpm3-mode', None, "RPM 3 compatibility mode (default)"), + ('rpm2-mode', None, "RPM 2 compatibility mode"), # Add the hooks necessary for specifying custom scripts - ('prep-script=', None, - "Specify a script for the PREP phase of RPM building"), - ('build-script=', None, - "Specify a script for the BUILD phase of RPM building"), - - ('pre-install=', None, - "Specify a script for the pre-INSTALL phase of RPM 
building"), - ('install-script=', None, - "Specify a script for the INSTALL phase of RPM building"), - ('post-install=', None, - "Specify a script for the post-INSTALL phase of RPM building"), - - ('pre-uninstall=', None, - "Specify a script for the pre-UNINSTALL phase of RPM building"), - ('post-uninstall=', None, - "Specify a script for the post-UNINSTALL phase of RPM building"), - - ('clean-script=', None, - "Specify a script for the CLEAN phase of RPM building"), - - ('verify-script=', None, - "Specify a script for the VERIFY phase of the RPM build"), - + ('prep-script=', None, "Specify a script for the PREP phase of RPM building"), + ('build-script=', None, "Specify a script for the BUILD phase of RPM building"), + ( + 'pre-install=', + None, + "Specify a script for the pre-INSTALL phase of RPM building", + ), + ( + 'install-script=', + None, + "Specify a script for the INSTALL phase of RPM building", + ), + ( + 'post-install=', + None, + "Specify a script for the post-INSTALL phase of RPM building", + ), + ( + 'pre-uninstall=', + None, + "Specify a script for the pre-UNINSTALL phase of RPM building", + ), + ( + 'post-uninstall=', + None, + "Specify a script for the post-UNINSTALL phase of RPM building", + ), + ('clean-script=', None, "Specify a script for the CLEAN phase of RPM building"), + ( + 'verify-script=', + None, + "Specify a script for the VERIFY phase of the RPM build", + ), # Allow a packager to explicitly force an architecture - ('force-arch=', None, - "Force an architecture onto the RPM build process"), - - ('quiet', 'q', - "Run the INSTALL phase of RPM building in quiet mode"), - ] - - boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode', - 'no-autoreq', 'quiet'] - - negative_opt = {'no-keep-temp': 'keep-temp', - 'no-rpm-opt-flags': 'use-rpm-opt-flags', - 'rpm2-mode': 'rpm3-mode'} - + ('force-arch=', None, "Force an architecture onto the RPM build process"), + ('quiet', 'q', "Run the INSTALL phase of RPM building in quiet mode"), + ] + + boolean_options = [ + 'keep-temp', + 'use-rpm-opt-flags', + 'rpm3-mode', + 'no-autoreq', + 'quiet', + ] + + negative_opt = { + 'no-keep-temp': 'keep-temp', + 'no-rpm-opt-flags': 'use-rpm-opt-flags', + 'rpm2-mode': 'rpm3-mode', + } def initialize_options(self): self.bdist_base = None @@ -181,8 +192,7 @@ class bdist_rpm(Command): self.set_undefined_options('bdist', ('bdist_base', 'bdist_base')) if self.rpm_base is None: if not self.rpm3_mode: - raise DistutilsOptionError( - "you must specify --rpm-base in RPM 2 mode") + raise DistutilsOptionError("you must specify --rpm-base in RPM 2 mode") self.rpm_base = os.path.join(self.bdist_base, "rpm") if self.python is None: @@ -192,14 +202,17 @@ class bdist_rpm(Command): self.python = "python3" elif self.fix_python: raise DistutilsOptionError( - "--python and --fix-python are mutually exclusive options") + "--python and --fix-python are mutually exclusive options" + ) if os.name != 'posix': - raise DistutilsPlatformError("don't know how to create RPM " - "distributions on platform %s" % os.name) + raise DistutilsPlatformError( + "don't know how to create RPM " "distributions on platform %s" % os.name + ) if self.binary_only and self.source_only: raise DistutilsOptionError( - "cannot supply both '--source-only' and '--binary-only'") + "cannot supply both '--source-only' and '--binary-only'" + ) # don't pass CFLAGS to pure python distributions if not self.distribution.has_ext_modules(): @@ -210,9 +223,11 @@ class bdist_rpm(Command): def finalize_package_data(self): 
self.ensure_string('group', "Development/Libraries") - self.ensure_string('vendor', - "%s <%s>" % (self.distribution.get_contact(), - self.distribution.get_contact_email())) + self.ensure_string( + 'vendor', + "%s <%s>" + % (self.distribution.get_contact(), self.distribution.get_contact_email()), + ) self.ensure_string('packager') self.ensure_string_list('doc_files') if isinstance(self.doc_files, list): @@ -221,12 +236,12 @@ class bdist_rpm(Command): self.doc_files.append(readme) self.ensure_string('release', "1") - self.ensure_string('serial') # should it be an int? + self.ensure_string('serial') # should it be an int? self.ensure_string('distribution_name') self.ensure_string('changelog') - # Format changelog correctly + # Format changelog correctly self.changelog = self._format_changelog(self.changelog) self.ensure_filename('icon') @@ -274,14 +289,12 @@ class bdist_rpm(Command): # Spec file goes into 'dist_dir' if '--spec-only specified', # build/rpm. otherwise. - spec_path = os.path.join(spec_dir, - "%s.spec" % self.distribution.get_name()) - self.execute(write_file, - (spec_path, - self._make_spec_file()), - "writing '%s'" % spec_path) - - if self.spec_only: # stop if requested + spec_path = os.path.join(spec_dir, "%s.spec" % self.distribution.get_name()) + self.execute( + write_file, (spec_path, self._make_spec_file()), "writing '%s'" % spec_path + ) + + if self.spec_only: # stop if requested return # Make a source distribution and copy to SOURCES directory with @@ -303,14 +316,13 @@ class bdist_rpm(Command): if os.path.exists(self.icon): self.copy_file(self.icon, source_dir) else: - raise DistutilsFileError( - "icon file '%s' does not exist" % self.icon) + raise DistutilsFileError("icon file '%s' does not exist" % self.icon) # build package log.info("building RPMs") rpm_cmd = ['rpmbuild'] - if self.source_only: # what kind of RPMs? + if self.source_only: # what kind of RPMs? 
rpm_cmd.append('-bs') elif self.binary_only: rpm_cmd.append('-bb') @@ -318,8 +330,7 @@ class bdist_rpm(Command): rpm_cmd.append('-ba') rpm_cmd.extend(['--define', '__python %s' % self.python]) if self.rpm3_mode: - rpm_cmd.extend(['--define', - '_topdir %s' % os.path.abspath(self.rpm_base)]) + rpm_cmd.extend(['--define', '_topdir %s' % os.path.abspath(self.rpm_base)]) if not self.keep_temp: rpm_cmd.append('--clean') @@ -335,7 +346,10 @@ class bdist_rpm(Command): src_rpm = nvr_string + ".src.rpm" non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm" q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % ( - src_rpm, non_src_rpm, spec_path) + src_rpm, + non_src_rpm, + spec_path, + ) out = os.popen(q_cmd) try: @@ -346,7 +360,7 @@ class bdist_rpm(Command): if not line: break l = line.strip().split() - assert(len(l) == 2) + assert len(l) == 2 binary_rpms.append(l[1]) # The source rpm is named after the first entry in the spec file if source_rpm is None: @@ -369,21 +383,20 @@ class bdist_rpm(Command): if not self.binary_only: srpm = os.path.join(rpm_dir['SRPMS'], source_rpm) - assert(os.path.exists(srpm)) + assert os.path.exists(srpm) self.move_file(srpm, self.dist_dir) filename = os.path.join(self.dist_dir, source_rpm) - self.distribution.dist_files.append( - ('bdist_rpm', pyversion, filename)) + self.distribution.dist_files.append(('bdist_rpm', pyversion, filename)) if not self.source_only: for rpm in binary_rpms: rpm = os.path.join(rpm_dir['RPMS'], rpm) if os.path.exists(rpm): self.move_file(rpm, self.dist_dir) - filename = os.path.join(self.dist_dir, - os.path.basename(rpm)) + filename = os.path.join(self.dist_dir, os.path.basename(rpm)) self.distribution.dist_files.append( - ('bdist_rpm', pyversion, filename)) + ('bdist_rpm', pyversion, filename) + ) def _dist_path(self, path): return os.path.join(self.dist_dir, os.path.basename(path)) @@ -395,12 +408,12 @@ class bdist_rpm(Command): # definitions and headers spec_file = [ '%define name ' + self.distribution.get_name(), - '%define version ' + self.distribution.get_version().replace('-','_'), + '%define version ' + self.distribution.get_version().replace('-', '_'), '%define unmangled_version ' + self.distribution.get_version(), - '%define release ' + self.release.replace('-','_'), + '%define release ' + self.release.replace('-', '_'), '', 'Summary: ' + (self.distribution.get_description() or "UNKNOWN"), - ] + ] # Workaround for #14443 which affects some RPM based systems such as # RHEL6 (and probably derivatives) @@ -408,8 +421,9 @@ class bdist_rpm(Command): # Generate a potential replacement value for __os_install_post (whilst # normalizing the whitespace to simplify the test for whether the # invocation of brp-python-bytecompile passes in __python): - vendor_hook = '\n'.join([' %s \\' % line.strip() - for line in vendor_hook.splitlines()]) + vendor_hook = '\n'.join( + [' %s \\' % line.strip() for line in vendor_hook.splitlines()] + ) problem = "brp-python-bytecompile \\\n" fixed = "brp-python-bytecompile %{__python} \\\n" fixed_hook = vendor_hook.replace(problem, fixed) @@ -420,14 +434,17 @@ class bdist_rpm(Command): # put locale summaries into spec file # XXX not supported for now (hard to put a dictionary # in a config file -- arg!) 
- #for locale in self.summaries.keys(): + # for locale in self.summaries.keys(): # spec_file.append('Summary(%s): %s' % (locale, # self.summaries[locale])) - spec_file.extend([ - 'Name: %{name}', - 'Version: %{version}', - 'Release: %{release}',]) + spec_file.extend( + [ + 'Name: %{name}', + 'Version: %{version}', + 'Release: %{release}', + ] + ) # XXX yuck! this filename is available from the "sdist" command, # but only after it has run: and we create the spec file before @@ -437,33 +454,36 @@ class bdist_rpm(Command): else: spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz') - spec_file.extend([ - 'License: ' + (self.distribution.get_license() or "UNKNOWN"), - 'Group: ' + self.group, - 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', - 'Prefix: %{_prefix}', ]) + spec_file.extend( + [ + 'License: ' + (self.distribution.get_license() or "UNKNOWN"), + 'Group: ' + self.group, + 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', + 'Prefix: %{_prefix}', + ] + ) if not self.force_arch: # noarch if no extension modules if not self.distribution.has_ext_modules(): spec_file.append('BuildArch: noarch') else: - spec_file.append( 'BuildArch: %s' % self.force_arch ) - - for field in ('Vendor', - 'Packager', - 'Provides', - 'Requires', - 'Conflicts', - 'Obsoletes', - ): + spec_file.append('BuildArch: %s' % self.force_arch) + + for field in ( + 'Vendor', + 'Packager', + 'Provides', + 'Requires', + 'Conflicts', + 'Obsoletes', + ): val = getattr(self, field.lower()) if isinstance(val, list): spec_file.append('%s: %s' % (field, ' '.join(val))) elif val is not None: spec_file.append('%s: %s' % (field, val)) - if self.distribution.get_url(): spec_file.append('Url: ' + self.distribution.get_url()) @@ -471,8 +491,7 @@ class bdist_rpm(Command): spec_file.append('Distribution: ' + self.distribution_name) if self.build_requires: - spec_file.append('BuildRequires: ' + - ' '.join(self.build_requires)) + spec_file.append('BuildRequires: ' + ' '.join(self.build_requires)) if self.icon: spec_file.append('Icon: ' + os.path.basename(self.icon)) @@ -480,16 +499,18 @@ class bdist_rpm(Command): if self.no_autoreq: spec_file.append('AutoReq: 0') - spec_file.extend([ - '', - '%description', - self.distribution.get_long_description() or "", - ]) + spec_file.extend( + [ + '', + '%description', + self.distribution.get_long_description() or "", + ] + ) # put locale descriptions into spec file # XXX again, suppressed because config file syntax doesn't # easily support this ;-( - #for locale in self.descriptions.keys(): + # for locale in self.descriptions.keys(): # spec_file.extend([ # '', # '%description -l ' + locale, @@ -498,7 +519,7 @@ class bdist_rpm(Command): # rpm scripts # figure out default build script - def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0])) + def_setup_call = "%s %s" % (self.python, os.path.basename(sys.argv[0])) def_build = "%s build" % def_setup_call if self.use_rpm_opt_flags: def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build @@ -509,8 +530,9 @@ class bdist_rpm(Command): # that we open and interpolate into the spec file, but the defaults # are just text that we drop in as-is. Hmmm. 
- install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT ' - '--record=INSTALLED_FILES') % def_setup_call + install_cmd = ( + '%s install -O1 --root=$RPM_BUILD_ROOT ' '--record=INSTALLED_FILES' + ) % def_setup_call script_options = [ ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"), @@ -529,37 +551,43 @@ class bdist_rpm(Command): # use 'default' as contents of script val = getattr(self, attr) if val or default: - spec_file.extend([ - '', - '%' + rpm_opt,]) + spec_file.extend( + [ + '', + '%' + rpm_opt, + ] + ) if val: with open(val) as f: spec_file.extend(f.read().split('\n')) else: spec_file.append(default) - # files section - spec_file.extend([ - '', - '%files -f INSTALLED_FILES', - '%defattr(-,root,root)', - ]) + spec_file.extend( + [ + '', + '%files -f INSTALLED_FILES', + '%defattr(-,root,root)', + ] + ) if self.doc_files: spec_file.append('%doc ' + ' '.join(self.doc_files)) if self.changelog: - spec_file.extend([ - '', - '%changelog',]) + spec_file.extend( + [ + '', + '%changelog', + ] + ) spec_file.extend(self.changelog) return spec_file def _format_changelog(self, changelog): - """Format the changelog correctly and convert it to a list of strings - """ + """Format the changelog correctly and convert it to a list of strings""" if not changelog: return changelog new_changelog = [] diff --git a/setuptools/_distutils/command/bdist_wininst.py b/setuptools/_distutils/command/bdist_wininst.py index 0e9ddaa..76b8a89 100644 --- a/setuptools/_distutils/command/bdist_wininst.py +++ b/setuptools/_distutils/command/bdist_wininst.py @@ -13,58 +13,88 @@ from distutils.errors import * from distutils.sysconfig import get_python_version from distutils import log + class bdist_wininst(Command): description = "create an executable installer for MS Windows" - user_options = [('bdist-dir=', None, - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('target-version=', None, - "require a specific python version" + - " on the target system"), - ('no-target-compile', 'c', - "do not compile .py to .pyc on the target system"), - ('no-target-optimize', 'o', - "do not compile .py to .pyo (optimized) " - "on the target system"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('bitmap=', 'b', - "bitmap to use for the installer instead of python-powered logo"), - ('title=', 't', - "title to display on the installer background instead of default"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('install-script=', None, - "basename of installation script to be run after " - "installation or before deinstallation"), - ('pre-install-script=', None, - "Fully qualified filename of a script to be run before " - "any files are installed. 
This script need not be in the " - "distribution"), - ('user-access-control=', None, - "specify Vista's UAC handling - 'none'/default=no " - "handling, 'auto'=use UAC if target Python installed for " - "all users, 'force'=always use UAC"), - ] - - boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', - 'skip-build'] + user_options = [ + ('bdist-dir=', None, "temporary directory for creating the distribution"), + ( + 'plat-name=', + 'p', + "platform name to embed in generated filenames " + "(default: %s)" % get_platform(), + ), + ( + 'keep-temp', + 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive", + ), + ( + 'target-version=', + None, + "require a specific python version" + " on the target system", + ), + ('no-target-compile', 'c', "do not compile .py to .pyc on the target system"), + ( + 'no-target-optimize', + 'o', + "do not compile .py to .pyo (optimized) " "on the target system", + ), + ('dist-dir=', 'd', "directory to put final built distributions in"), + ( + 'bitmap=', + 'b', + "bitmap to use for the installer instead of python-powered logo", + ), + ( + 'title=', + 't', + "title to display on the installer background instead of default", + ), + ('skip-build', None, "skip rebuilding everything (for testing/debugging)"), + ( + 'install-script=', + None, + "basename of installation script to be run after " + "installation or before deinstallation", + ), + ( + 'pre-install-script=', + None, + "Fully qualified filename of a script to be run before " + "any files are installed. This script need not be in the " + "distribution", + ), + ( + 'user-access-control=', + None, + "specify Vista's UAC handling - 'none'/default=no " + "handling, 'auto'=use UAC if target Python installed for " + "all users, 'force'=always use UAC", + ), + ] + + boolean_options = [ + 'keep-temp', + 'no-target-compile', + 'no-target-optimize', + 'skip-build', + ] # bpo-10945: bdist_wininst requires mbcs encoding only available on Windows - _unsupported = (sys.platform != "win32") + _unsupported = sys.platform != "win32" def __init__(self, *args, **kw): super().__init__(*args, **kw) - warnings.warn("bdist_wininst command is deprecated since Python 3.8, " - "use bdist_wheel (wheel packages) instead", - DeprecationWarning, 2) + warnings.warn( + "bdist_wininst command is deprecated since Python 3.8, " + "use bdist_wheel (wheel packages) instead", + DeprecationWarning, + 2, + ) def initialize_options(self): self.bdist_dir = None @@ -81,7 +111,6 @@ class bdist_wininst(Command): self.pre_install_script = None self.user_access_control = None - def finalize_options(self): self.set_undefined_options('bdist', ('skip_build', 'skip_build')) @@ -102,14 +131,16 @@ class bdist_wininst(Command): short_version = get_python_version() if self.target_version and self.target_version != short_version: raise DistutilsOptionError( - "target version can only be %s, or the '--skip-build'" \ - " option must be specified" % (short_version,)) + "target version can only be %s, or the '--skip-build'" + " option must be specified" % (short_version,) + ) self.target_version = short_version - self.set_undefined_options('bdist', - ('dist_dir', 'dist_dir'), - ('plat_name', 'plat_name'), - ) + self.set_undefined_options( + 'bdist', + ('dist_dir', 'dist_dir'), + ('plat_name', 'plat_name'), + ) if self.install_script: for script in self.distribution.scripts: @@ -117,16 +148,17 @@ class bdist_wininst(Command): break else: raise DistutilsOptionError( - "install_script '%s' not found in 
scripts" - % self.install_script) + "install_script '%s' not found in scripts" % self.install_script + ) def run(self): - if (sys.platform != "win32" and - (self.distribution.has_ext_modules() or - self.distribution.has_c_libraries())): - raise DistutilsPlatformError \ - ("distribution contains extensions and/or C libraries; " - "must be compiled on a Windows 32 platform") + if sys.platform != "win32" and ( + self.distribution.has_ext_modules() or self.distribution.has_c_libraries() + ): + raise DistutilsPlatformError( + "distribution contains extensions and/or C libraries; " + "must be compiled on a Windows 32 platform" + ) if not self.skip_build: self.run_command('build') @@ -155,8 +187,7 @@ class bdist_wininst(Command): target_version = '%d.%d' % sys.version_info[:2] plat_specifier = ".%s-%s" % (self.plat_name, target_version) build = self.get_finalized_command('build') - build.build_lib = os.path.join(build.build_base, - 'lib' + plat_specifier) + build.build_lib = os.path.join(build.build_base, 'lib' + plat_specifier) # Use a custom scheme for the zip-file, because we have to decide # at installation time which scheme to use. @@ -164,9 +195,7 @@ class bdist_wininst(Command): value = key.upper() if key == 'headers': value = value + '/Include/$dist_name' - setattr(install, - 'install_' + key, - value) + setattr(install, 'install_' + key, value) log.info("installing to %s", self.bdist_dir) install.ensure_finalized() @@ -182,18 +211,19 @@ class bdist_wininst(Command): # And make an archive relative to the root of the # pseudo-installation tree. from tempfile import mktemp + archive_basename = mktemp() fullname = self.distribution.get_fullname() - arcname = self.make_archive(archive_basename, "zip", - root_dir=self.bdist_dir) + arcname = self.make_archive(archive_basename, "zip", root_dir=self.bdist_dir) # create an exe containing the zip-file self.create_exe(arcname, fullname, self.bitmap) if self.distribution.has_ext_modules(): pyversion = get_python_version() else: pyversion = 'any' - self.distribution.dist_files.append(('bdist_wininst', pyversion, - self.get_installer_filename(fullname))) + self.distribution.dist_files.append( + ('bdist_wininst', pyversion, self.get_installer_filename(fullname)) + ) # remove the zip-file again log.debug("removing temporary file '%s'", arcname) os.remove(arcname) @@ -217,12 +247,19 @@ class bdist_wininst(Command): def escape(s): return s.replace("\n", "\\n") - for name in ["author", "author_email", "description", "maintainer", - "maintainer_email", "name", "url", "version"]: + for name in [ + "author", + "author_email", + "description", + "maintainer", + "maintainer_email", + "name", + "url", + "version", + ]: data = getattr(metadata, name, "") if data: - info = info + ("\n %s: %s" % \ - (name.capitalize(), escape(data))) + info = info + ("\n %s: %s" % (name.capitalize(), escape(data))) lines.append("%s=%s" % (name, escape(data))) # The [setup] section contains entries controlling @@ -242,8 +279,11 @@ class bdist_wininst(Command): lines.append("title=%s" % escape(title)) import time import distutils - build_info = "Built %s with distutils-%s" % \ - (time.ctime(time.time()), distutils.__version__) + + build_info = "Built %s with distutils-%s" % ( + time.ctime(time.time()), + distutils.__version__, + ) lines.append("build_info=%s" % build_info) return "\n".join(lines) @@ -279,8 +319,7 @@ class bdist_wininst(Command): # We need to normalize newlines, so we open in text mode and # convert back to bytes. "latin-1" simply avoids any possible # failures. 
- with open(self.pre_install_script, "r", - encoding="latin-1") as script: + with open(self.pre_install_script, "r", encoding="latin-1") as script: script_data = script.read().encode("latin-1") cfgdata = cfgdata + script_data + b"\n\0" else: @@ -293,11 +332,12 @@ class bdist_wininst(Command): # expects. If the layout changes, increment that number, make # the corresponding changes to the wininst.exe sources, and # recompile them. - header = struct.pack("" if self.build_temp is None: - self.build_temp = os.path.join(self.build_base, - 'temp' + plat_specifier) + self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier) if self.build_scripts is None: - self.build_scripts = os.path.join(self.build_base, - 'scripts-%d.%d' % sys.version_info[:2]) + self.build_scripts = os.path.join( + self.build_base, 'scripts-%d.%d' % sys.version_info[:2] + ) if self.executable is None and sys.executable: self.executable = os.path.normpath(sys.executable) @@ -135,7 +130,6 @@ class build(Command): for cmd_name in self.get_sub_commands(): self.run_command(cmd_name) - # -- Predicates for the sub-command list --------------------------- def has_pure_modules(self): @@ -150,9 +144,9 @@ class build(Command): def has_scripts(self): return self.distribution.has_scripts() - - sub_commands = [('build_py', has_pure_modules), - ('build_clib', has_c_libraries), - ('build_ext', has_ext_modules), - ('build_scripts', has_scripts), - ] + sub_commands = [ + ('build_py', has_pure_modules), + ('build_clib', has_c_libraries), + ('build_ext', has_ext_modules), + ('build_scripts', has_scripts), + ] diff --git a/setuptools/_distutils/command/build_clib.py b/setuptools/_distutils/command/build_clib.py index 3e20ef2..003499f 100644 --- a/setuptools/_distutils/command/build_clib.py +++ b/setuptools/_distutils/command/build_clib.py @@ -20,8 +20,10 @@ from distutils.errors import * from distutils.sysconfig import customize_compiler from distutils import log + def show_compilers(): from distutils.ccompiler import show_compilers + show_compilers() @@ -30,24 +32,18 @@ class build_clib(Command): description = "build C/C++ libraries used by Python extensions" user_options = [ - ('build-clib=', 'b', - "directory to build C/C++ libraries to"), - ('build-temp=', 't', - "directory to put temporary build by-products"), - ('debug', 'g', - "compile with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('compiler=', 'c', - "specify the compiler type"), - ] + ('build-clib=', 'b', "directory to build C/C++ libraries to"), + ('build-temp=', 't', "directory to put temporary build by-products"), + ('debug', 'g', "compile with debugging information"), + ('force', 'f', "forcibly build everything (ignore file timestamps)"), + ('compiler=', 'c', "specify the compiler type"), + ] boolean_options = ['debug', 'force'] help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] + ('help-compiler', None, "list available compilers", show_compilers), + ] def initialize_options(self): self.build_clib = None @@ -64,19 +60,20 @@ class build_clib(Command): self.force = 0 self.compiler = None - def finalize_options(self): # This might be confusing: both build-clib and build-temp default # to build-temp as defined by the "build" command. This is because # I think that C libraries are really just temporary build # by-products, at least from the point of view of building Python # extensions -- but I want to keep my options open. 
- self.set_undefined_options('build', - ('build_temp', 'build_clib'), - ('build_temp', 'build_temp'), - ('compiler', 'compiler'), - ('debug', 'debug'), - ('force', 'force')) + self.set_undefined_options( + 'build', + ('build_temp', 'build_clib'), + ('build_temp', 'build_temp'), + ('compiler', 'compiler'), + ('debug', 'debug'), + ('force', 'force'), + ) self.libraries = self.distribution.libraries if self.libraries: @@ -90,23 +87,23 @@ class build_clib(Command): # XXX same as for build_ext -- what about 'self.define' and # 'self.undef' ? - def run(self): if not self.libraries: return # Yech -- this is cut 'n pasted from build_ext.py! from distutils.ccompiler import new_compiler - self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, - force=self.force) + + self.compiler = new_compiler( + compiler=self.compiler, dry_run=self.dry_run, force=self.force + ) customize_compiler(self.compiler) if self.include_dirs is not None: self.compiler.set_include_dirs(self.include_dirs) if self.define is not None: # 'define' option is a list of (name,value) tuples - for (name,value) in self.define: + for (name, value) in self.define: self.compiler.define_macro(name, value) if self.undef is not None: for macro in self.undef: @@ -114,7 +111,6 @@ class build_clib(Command): self.build_libraries(self.libraries) - def check_library_list(self, libraries): """Ensure that the list of libraries is valid. @@ -126,30 +122,31 @@ class build_clib(Command): just returns otherwise. """ if not isinstance(libraries, list): - raise DistutilsSetupError( - "'libraries' option must be a list of tuples") + raise DistutilsSetupError("'libraries' option must be a list of tuples") for lib in libraries: if not isinstance(lib, tuple) and len(lib) != 2: - raise DistutilsSetupError( - "each element of 'libraries' must a 2-tuple") + raise DistutilsSetupError("each element of 'libraries' must a 2-tuple") name, build_info = lib if not isinstance(name, str): raise DistutilsSetupError( - "first element of each tuple in 'libraries' " - "must be a string (the library name)") + "first element of each tuple in 'libraries' " + "must be a string (the library name)" + ) if '/' in name or (os.sep != '/' and os.sep in name): - raise DistutilsSetupError("bad library name '%s': " - "may not contain directory separators" % lib[0]) + raise DistutilsSetupError( + "bad library name '%s': " + "may not contain directory separators" % lib[0] + ) if not isinstance(build_info, dict): raise DistutilsSetupError( - "second element of each tuple in 'libraries' " - "must be a dictionary (build info)") - + "second element of each tuple in 'libraries' " + "must be a dictionary (build info)" + ) def get_library_names(self): # Assume the library list is valid -- 'check_library_list()' is @@ -162,7 +159,6 @@ class build_clib(Command): lib_names.append(lib_name) return lib_names - def get_source_files(self): self.check_library_list(self.libraries) filenames = [] @@ -170,22 +166,23 @@ class build_clib(Command): sources = build_info.get('sources') if sources is None or not isinstance(sources, (list, tuple)): raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % lib_name) + "in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % lib_name + ) filenames.extend(sources) return filenames - def build_libraries(self, libraries): for (lib_name, build_info) in libraries: sources = build_info.get('sources') if 
sources is None or not isinstance(sources, (list, tuple)): raise DistutilsSetupError( - "in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % lib_name) + "in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % lib_name + ) sources = list(sources) log.info("building '%s' library", lib_name) @@ -195,15 +192,17 @@ class build_clib(Command): # files in a temporary build directory.) macros = build_info.get('macros') include_dirs = build_info.get('include_dirs') - objects = self.compiler.compile(sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=include_dirs, - debug=self.debug) + objects = self.compiler.compile( + sources, + output_dir=self.build_temp, + macros=macros, + include_dirs=include_dirs, + debug=self.debug, + ) # Now "link" the object files together into a static library. # (On Unix at least, this isn't really linking -- it just # builds an archive. Whatever.) - self.compiler.create_static_lib(objects, lib_name, - output_dir=self.build_clib, - debug=self.debug) + self.compiler.create_static_lib( + objects, lib_name, output_dir=self.build_clib, debug=self.debug + ) diff --git a/setuptools/_distutils/command/build_ext.py b/setuptools/_distutils/command/build_ext.py index 181671b..1a6dd39 100644 --- a/setuptools/_distutils/command/build_ext.py +++ b/setuptools/_distutils/command/build_ext.py @@ -22,12 +22,12 @@ from site import USER_BASE # An extension name is just a dot-separated list of Python NAMEs (ie. # the same as a fully-qualified module name). -extension_name_re = re.compile \ - (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$') +extension_name_re = re.compile(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$') -def show_compilers (): +def show_compilers(): from distutils.ccompiler import show_compilers + show_compilers() @@ -55,54 +55,50 @@ class build_ext(Command): sep_by = " (separated by '%s')" % os.pathsep user_options = [ - ('build-lib=', 'b', - "directory for compiled extension modules"), - ('build-temp=', 't', - "directory for temporary files (build by-products)"), - ('plat-name=', 'p', - "platform name to cross-compile for, if supported " - "(default: %s)" % get_platform()), - ('inplace', 'i', - "ignore build-lib and put compiled extensions into the source " + - "directory alongside your pure Python modules"), - ('include-dirs=', 'I', - "list of directories to search for header files" + sep_by), - ('define=', 'D', - "C preprocessor macros to define"), - ('undef=', 'U', - "C preprocessor macros to undefine"), - ('libraries=', 'l', - "external C libraries to link with"), - ('library-dirs=', 'L', - "directories to search for external C libraries" + sep_by), - ('rpath=', 'R', - "directories to search for shared C libraries at runtime"), - ('link-objects=', 'O', - "extra explicit link objects to include in the link"), - ('debug', 'g', - "compile/link with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('compiler=', 'c', - "specify the compiler type"), - ('parallel=', 'j', - "number of parallel build jobs"), - ('swig-cpp', None, - "make SWIG create C++ files (default is C)"), - ('swig-opts=', None, - "list of SWIG command line options"), - ('swig=', None, - "path to the SWIG executable"), - ('user', None, - "add user include, library and rpath") - ] + ('build-lib=', 'b', "directory for compiled extension modules"), + ('build-temp=', 't', "directory for temporary files 
(build by-products)"), + ( + 'plat-name=', + 'p', + "platform name to cross-compile for, if supported " + "(default: %s)" % get_platform(), + ), + ( + 'inplace', + 'i', + "ignore build-lib and put compiled extensions into the source " + + "directory alongside your pure Python modules", + ), + ( + 'include-dirs=', + 'I', + "list of directories to search for header files" + sep_by, + ), + ('define=', 'D', "C preprocessor macros to define"), + ('undef=', 'U', "C preprocessor macros to undefine"), + ('libraries=', 'l', "external C libraries to link with"), + ( + 'library-dirs=', + 'L', + "directories to search for external C libraries" + sep_by, + ), + ('rpath=', 'R', "directories to search for shared C libraries at runtime"), + ('link-objects=', 'O', "extra explicit link objects to include in the link"), + ('debug', 'g', "compile/link with debugging information"), + ('force', 'f', "forcibly build everything (ignore file timestamps)"), + ('compiler=', 'c', "specify the compiler type"), + ('parallel=', 'j', "number of parallel build jobs"), + ('swig-cpp', None, "make SWIG create C++ files (default is C)"), + ('swig-opts=', None, "list of SWIG command line options"), + ('swig=', None, "path to the SWIG executable"), + ('user', None, "add user include, library and rpath"), + ] boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user'] help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] + ('help-compiler', None, "list available compilers", show_compilers), + ] def initialize_options(self): self.extensions = None @@ -131,15 +127,16 @@ class build_ext(Command): def finalize_options(self): from distutils import sysconfig - self.set_undefined_options('build', - ('build_lib', 'build_lib'), - ('build_temp', 'build_temp'), - ('compiler', 'compiler'), - ('debug', 'debug'), - ('force', 'force'), - ('parallel', 'parallel'), - ('plat_name', 'plat_name'), - ) + self.set_undefined_options( + 'build', + ('build_lib', 'build_lib'), + ('build_temp', 'build_temp'), + ('compiler', 'compiler'), + ('debug', 'debug'), + ('force', 'force'), + ('parallel', 'parallel'), + ('plat_name', 'plat_name'), + ) if self.package is None: self.package = self.distribution.ext_package @@ -164,8 +161,7 @@ class build_ext(Command): # any local include dirs take precedence. 
self.include_dirs.extend(py_include.split(os.path.pathsep)) if plat_py_include != py_include: - self.include_dirs.extend( - plat_py_include.split(os.path.pathsep)) + self.include_dirs.extend(plat_py_include.split(os.path.pathsep)) self.ensure_string_list('libraries') self.ensure_string_list('link_objects') @@ -220,9 +216,11 @@ class build_ext(Command): if sys.platform[:6] == 'cygwin': if not sysconfig.python_build: # building third party extensions - self.library_dirs.append(os.path.join(sys.prefix, "lib", - "python" + get_python_version(), - "config")) + self.library_dirs.append( + os.path.join( + sys.prefix, "lib", "python" + get_python_version(), "config" + ) + ) else: # building python standard extensions self.library_dirs.append('.') @@ -230,7 +228,7 @@ class build_ext(Command): # For building extensions with a shared Python library, # Python's library directory must be appended to library_dirs # See Issues: #1600860, #4366 - if (sysconfig.get_config_var('Py_ENABLE_SHARED')): + if sysconfig.get_config_var('Py_ENABLE_SHARED'): if not sysconfig.python_build: # building third party extensions self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) @@ -302,10 +300,12 @@ class build_ext(Command): # Setup the CCompiler object that we'll use to do all the # compiling and linking - self.compiler = new_compiler(compiler=self.compiler, - verbose=self.verbose, - dry_run=self.dry_run, - force=self.force) + self.compiler = new_compiler( + compiler=self.compiler, + verbose=self.verbose, + dry_run=self.dry_run, + force=self.force, + ) customize_compiler(self.compiler) # If we are cross-compiling, init the compiler now (if we are not # cross-compiling, init would not hurt, but people may rely on @@ -350,34 +350,40 @@ class build_ext(Command): """ if not isinstance(extensions, list): raise DistutilsSetupError( - "'ext_modules' option must be a list of Extension instances") + "'ext_modules' option must be a list of Extension instances" + ) for i, ext in enumerate(extensions): if isinstance(ext, Extension): - continue # OK! (assume type-checking done - # by Extension constructor) + continue # OK! (assume type-checking done + # by Extension constructor) if not isinstance(ext, tuple) or len(ext) != 2: raise DistutilsSetupError( - "each element of 'ext_modules' option must be an " - "Extension instance or 2-tuple") + "each element of 'ext_modules' option must be an " + "Extension instance or 2-tuple" + ) ext_name, build_info = ext - log.warn("old-style (ext_name, build_info) tuple found in " - "ext_modules for extension '%s' " - "-- please convert to Extension instance", ext_name) + log.warn( + "old-style (ext_name, build_info) tuple found in " + "ext_modules for extension '%s' " + "-- please convert to Extension instance", + ext_name, + ) - if not (isinstance(ext_name, str) and - extension_name_re.match(ext_name)): + if not (isinstance(ext_name, str) and extension_name_re.match(ext_name)): raise DistutilsSetupError( - "first element of each tuple in 'ext_modules' " - "must be the extension name (a string)") + "first element of each tuple in 'ext_modules' " + "must be the extension name (a string)" + ) if not isinstance(build_info, dict): raise DistutilsSetupError( - "second element of each tuple in 'ext_modules' " - "must be a dictionary (build info)") + "second element of each tuple in 'ext_modules' " + "must be a dictionary (build info)" + ) # OK, the (ext_name, build_info) dict is type-safe: convert it # to an Extension instance. 
@@ -385,9 +391,14 @@ class build_ext(Command): # Easy stuff: one-to-one mapping from dict elements to # instance attributes. - for key in ('include_dirs', 'library_dirs', 'libraries', - 'extra_objects', 'extra_compile_args', - 'extra_link_args'): + for key in ( + 'include_dirs', + 'library_dirs', + 'libraries', + 'extra_objects', + 'extra_compile_args', + 'extra_link_args', + ): val = build_info.get(key) if val is not None: setattr(ext, key, val) @@ -395,8 +406,7 @@ class build_ext(Command): # Medium-easy stuff: same syntax/semantics, different names. ext.runtime_library_dirs = build_info.get('rpath') if 'def_file' in build_info: - log.warn("'def_file' element of build info dict " - "no longer supported") + log.warn("'def_file' element of build info dict " "no longer supported") # Non-trivial stuff: 'macros' split into 'define_macros' # and 'undef_macros'. @@ -407,8 +417,9 @@ class build_ext(Command): for macro in macros: if not (isinstance(macro, tuple) and len(macro) in (1, 2)): raise DistutilsSetupError( - "'macros' element of build info dict " - "must be 1- or 2-tuple") + "'macros' element of build info dict " + "must be 1- or 2-tuple" + ) if len(macro) == 1: ext.undef_macros.append(macro[0]) elif len(macro) == 2: @@ -461,8 +472,9 @@ class build_ext(Command): return with ThreadPoolExecutor(max_workers=workers) as executor: - futures = [executor.submit(self.build_extension, ext) - for ext in self.extensions] + futures = [ + executor.submit(self.build_extension, ext) for ext in self.extensions + ] for ext, fut in zip(self.extensions, futures): with self._filter_build_errors(ext): fut.result() @@ -479,16 +491,16 @@ class build_ext(Command): except (CCompilerError, DistutilsError, CompileError) as e: if not ext.optional: raise - self.warn('building extension "%s" failed: %s' % - (ext.name, e)) + self.warn('building extension "%s" failed: %s' % (ext.name, e)) def build_extension(self, ext): sources = ext.sources if sources is None or not isinstance(sources, (list, tuple)): raise DistutilsSetupError( - "in 'ext_modules' option (extension '%s'), " - "'sources' must be present and must be " - "a list of source filenames" % ext.name) + "in 'ext_modules' option (extension '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % ext.name + ) # sort to make the resulting .so file build reproducible sources = sorted(sources) @@ -525,13 +537,15 @@ class build_ext(Command): for undef in ext.undef_macros: macros.append((undef,)) - objects = self.compiler.compile(sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=ext.include_dirs, - debug=self.debug, - extra_postargs=extra_args, - depends=ext.depends) + objects = self.compiler.compile( + sources, + output_dir=self.build_temp, + macros=macros, + include_dirs=ext.include_dirs, + debug=self.debug, + extra_postargs=extra_args, + depends=ext.depends, + ) # XXX outdated variable, kept here in case third-part code # needs it. 
@@ -548,7 +562,8 @@ class build_ext(Command): language = ext.language or self.compiler.detect_language(sources) self.compiler.link_shared_object( - objects, ext_path, + objects, + ext_path, libraries=self.get_libraries(ext), library_dirs=ext.library_dirs, runtime_library_dirs=ext.runtime_library_dirs, @@ -556,7 +571,8 @@ class build_ext(Command): export_symbols=self.get_export_symbols(ext), debug=self.debug, build_temp=self.build_temp, - target_lang=language) + target_lang=language, + ) def swig_sources(self, sources, extension): """Walk the list of source files in 'sources', looking for SWIG @@ -576,15 +592,18 @@ class build_ext(Command): if self.swig_cpp: log.warn("--swig-cpp is deprecated - use --swig-opts=-c++") - if self.swig_cpp or ('-c++' in self.swig_opts) or \ - ('-c++' in extension.swig_opts): + if ( + self.swig_cpp + or ('-c++' in self.swig_opts) + or ('-c++' in extension.swig_opts) + ): target_ext = '.cpp' else: target_ext = '.c' for source in sources: (base, ext) = os.path.splitext(source) - if ext == ".i": # SWIG interface file + if ext == ".i": # SWIG interface file new_sources.append(base + '_wrap' + target_ext) swig_sources.append(source) swig_targets[source] = new_sources[-1] @@ -631,8 +650,9 @@ class build_ext(Command): return "swig.exe" else: raise DistutilsPlatformError( - "I don't know how to find (much less run) SWIG " - "on platform '%s'" % os.name) + "I don't know how to find (much less run) SWIG " + "on platform '%s'" % os.name + ) # -- Name generators ----------------------------------------------- # (extension names, filenames, whatever) @@ -650,7 +670,7 @@ class build_ext(Command): # no further work needed # returning : # build_dir/package/path/filename - filename = os.path.join(*modpath[:-1]+[filename]) + filename = os.path.join(*modpath[:-1] + [filename]) return os.path.join(self.build_lib, filename) # the inplace option requires to find the package directory @@ -678,6 +698,7 @@ class build_ext(Command): "foo\bar.pyd"). """ from distutils.sysconfig import get_config_var + ext_path = ext_name.split('.') ext_suffix = get_config_var('EXT_SUFFIX') return os.path.join(*ext_path) + ext_suffix @@ -715,12 +736,15 @@ class build_ext(Command): # Append '_d' to the python import library on debug builds. if sys.platform == "win32": from distutils._msvccompiler import MSVCCompiler + if not isinstance(self.compiler, MSVCCompiler): template = "python%d%d" if self.debug: template = template + '_d' - pythonlib = (template % - (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + pythonlib = template % ( + sys.hexversion >> 24, + (sys.hexversion >> 16) & 0xFF, + ) # don't extend ext.libraries, it may be shared with other # extensions, it is a reference to the original list return ext.libraries + [pythonlib] @@ -734,6 +758,7 @@ class build_ext(Command): # Windows like MinGW) it is simply necessary that all symbols in # shared libraries are resolved at link time. 
from distutils.sysconfig import get_config_var + link_libpython = False if get_config_var('Py_ENABLE_SHARED'): # A native build on an Android device or on Cygwin diff --git a/setuptools/_distutils/command/build_py.py b/setuptools/_distutils/command/build_py.py index 7ef9bce..1b22004 100644 --- a/setuptools/_distutils/command/build_py.py +++ b/setuptools/_distutils/command/build_py.py @@ -12,7 +12,8 @@ from distutils.errors import * from distutils.util import convert_path from distutils import log -class build_py (Command): + +class build_py(Command): description = "\"build\" pure Python modules (copy to build directory)" @@ -20,14 +21,17 @@ class build_py (Command): ('build-lib=', 'd', "directory to \"build\" (copy) to"), ('compile', 'c', "compile .py to .pyc"), ('no-compile', None, "don't compile .py files [default]"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ( + 'optimize=', + 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]", + ), ('force', 'f', "forcibly build everything (ignore file timestamps)"), - ] + ] boolean_options = ['compile', 'force'] - negative_opt = {'no-compile' : 'compile'} + negative_opt = {'no-compile': 'compile'} def initialize_options(self): self.build_lib = None @@ -40,9 +44,9 @@ class build_py (Command): self.force = None def finalize_options(self): - self.set_undefined_options('build', - ('build_lib', 'build_lib'), - ('force', 'force')) + self.set_undefined_options( + 'build', ('build_lib', 'build_lib'), ('force', 'force') + ) # Get the distribution options that are aliases for build_py # options -- list of packages and list of modules. @@ -109,26 +113,26 @@ class build_py (Command): # Length of path to strip from found files plen = 0 if src_dir: - plen = len(src_dir)+1 + plen = len(src_dir) + 1 # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] + filenames = [file[plen:] for file in self.find_data_files(package, src_dir)] data.append((package, src_dir, build_dir, filenames)) return data def find_data_files(self, package, src_dir): """Return filenames for package's data files in 'src_dir'""" - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) + globs = self.package_data.get('', []) + self.package_data.get(package, []) files = [] for pattern in globs: # Each pattern has to be converted to a platform-specific path - filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern))) + filelist = glob.glob( + os.path.join(glob.escape(src_dir), convert_path(pattern)) + ) # Files that match more than one pattern are only added once - files.extend([fn for fn in filelist if fn not in files - and os.path.isfile(fn)]) + files.extend( + [fn for fn in filelist if fn not in files and os.path.isfile(fn)] + ) return files def build_package_data(self): @@ -138,13 +142,14 @@ class build_py (Command): for filename in filenames: target = os.path.join(build_dir, filename) self.mkpath(os.path.dirname(target)) - self.copy_file(os.path.join(src_dir, filename), target, - preserve_mode=False) + self.copy_file( + os.path.join(src_dir, filename), target, preserve_mode=False + ) def get_package_dir(self, package): """Return the directory, relative to the top of the source - distribution, where package 'package' should be found - (at least according to the 'package_dir' option, if any).""" + 
distribution, where package 'package' should be found + (at least according to the 'package_dir' option, if any).""" path = package.split('.') if not self.package_dir: @@ -188,11 +193,13 @@ class build_py (Command): if package_dir != "": if not os.path.exists(package_dir): raise DistutilsFileError( - "package directory '%s' does not exist" % package_dir) + "package directory '%s' does not exist" % package_dir + ) if not os.path.isdir(package_dir): raise DistutilsFileError( - "supposed package directory '%s' exists, " - "but is not a directory" % package_dir) + "supposed package directory '%s' exists, " + "but is not a directory" % package_dir + ) # Require __init__.py for all but the "root package" if package: @@ -200,8 +207,10 @@ class build_py (Command): if os.path.isfile(init_py): return init_py else: - log.warn(("package init file '%s' not found " + - "(or not a regular file)"), init_py) + log.warn( + ("package init file '%s' not found " + "(or not a regular file)"), + init_py, + ) # Either not in a package at all (__init__.py not expected), or # __init__.py doesn't exist -- so don't return the filename. @@ -313,17 +322,21 @@ class build_py (Command): outputs.append(filename) if include_bytecode: if self.compile: - outputs.append(importlib.util.cache_from_source( - filename, optimization='')) + outputs.append( + importlib.util.cache_from_source(filename, optimization='') + ) if self.optimize > 0: - outputs.append(importlib.util.cache_from_source( - filename, optimization=self.optimize)) + outputs.append( + importlib.util.cache_from_source( + filename, optimization=self.optimize + ) + ) outputs += [ os.path.join(build_dir, filename) for package, src_dir, build_dir, filenames in self.data_files for filename in filenames - ] + ] return outputs @@ -332,7 +345,8 @@ class build_py (Command): package = package.split('.') elif not isinstance(package, (list, tuple)): raise TypeError( - "'package' must be a string (dot-separated), list, or tuple") + "'package' must be a string (dot-separated), list, or tuple" + ) # Now put the module source file into the "build" area -- this is # easy, we just copy it somewhere under self.build_lib (the build @@ -377,6 +391,7 @@ class build_py (Command): return from distutils.util import byte_compile + prefix = self.build_lib if prefix[-1] != os.sep: prefix = prefix + os.sep @@ -385,8 +400,14 @@ class build_py (Command): # method of the "install_lib" command, except for the determination # of the 'prefix' string. Hmmm. 
         if self.compile:
-            byte_compile(files, optimize=0,
-                         force=self.force, prefix=prefix, dry_run=self.dry_run)
+            byte_compile(
+                files, optimize=0, force=self.force, prefix=prefix, dry_run=self.dry_run
+            )
         if self.optimize > 0:
-            byte_compile(files, optimize=self.optimize,
-                         force=self.force, prefix=prefix, dry_run=self.dry_run)
+            byte_compile(
+                files,
+                optimize=self.optimize,
+                force=self.force,
+                prefix=prefix,
+                dry_run=self.dry_run,
+            )
diff --git a/setuptools/_distutils/command/build_scripts.py b/setuptools/_distutils/command/build_scripts.py
index e56511d..17058db 100644
--- a/setuptools/_distutils/command/build_scripts.py
+++ b/setuptools/_distutils/command/build_scripts.py
@@ -29,7 +29,7 @@ class build_scripts(Command):
         ('build-dir=', 'd', "directory to \"build\" (copy) to"),
         ('force', 'f', "forcibly build everything (ignore file timestamps"),
         ('executable=', 'e', "specify final destination interpreter path"),
-        ]
+    ]
 
     boolean_options = ['force']
 
@@ -40,10 +40,12 @@ class build_scripts(Command):
         self.executable = None
 
     def finalize_options(self):
-        self.set_undefined_options('build',
-                                   ('build_scripts', 'build_dir'),
-                                   ('force', 'force'),
-                                   ('executable', 'executable'))
+        self.set_undefined_options(
+            'build',
+            ('build_scripts', 'build_dir'),
+            ('force', 'force'),
+            ('executable', 'executable'),
+        )
         self.scripts = self.distribution.scripts
 
     def get_source_files(self):
@@ -101,17 +103,19 @@ class build_scripts(Command):
         updated_files.append(outfile)
 
         if shebang_match:
-            log.info("copying and adjusting %s -> %s", script,
-                     self.build_dir)
+            log.info("copying and adjusting %s -> %s", script, self.build_dir)
             if not self.dry_run:
                 if not sysconfig.python_build:
                     executable = self.executable
                 else:
                     executable = os.path.join(
                         sysconfig.get_config_var("BINDIR"),
-                        "python%s%s" % (
+                        "python%s%s"
+                        % (
                             sysconfig.get_config_var("VERSION"),
-                            sysconfig.get_config_var("EXE")))
+                            sysconfig.get_config_var("EXE"),
+                        ),
+                    )
                 post_interp = shebang_match.group(1) or ''
                 shebang = "#!" + executable + post_interp + "\n"
                 self._validate_shebang(shebang, f.encoding)
@@ -140,8 +144,7 @@ class build_scripts(Command):
         oldmode = os.stat(file)[ST_MODE] & 0o7777
         newmode = (oldmode | 0o555) & 0o7777
         if newmode != oldmode:
-            log.info("changing mode of %s from %o to %o",
-                     file, oldmode, newmode)
+            log.info("changing mode of %s from %o to %o", file, oldmode, newmode)
             os.chmod(file, newmode)
 
     @staticmethod
@@ -155,8 +158,8 @@ class build_scripts(Command):
             shebang.encode('utf-8')
         except UnicodeEncodeError:
             raise ValueError(
-                "The shebang ({!r}) is not encodable "
-                "to utf-8".format(shebang))
+                "The shebang ({!r}) is not encodable " "to utf-8".format(shebang)
+            )
 
         # If the script is encoded to a custom encoding (use a
         # #coding:xxx cookie), the shebang has to be encodable to
@@ -166,5 +169,5 @@ class build_scripts(Command):
         except UnicodeEncodeError:
             raise ValueError(
                 "The shebang ({!r}) is not encodable "
-                "to the script encoding ({})"
-                .format(shebang, encoding))
+                "to the script encoding ({})".format(shebang, encoding)
+            )
diff --git a/setuptools/_distutils/command/check.py b/setuptools/_distutils/command/check.py
index 8a02dbc..176a8b8 100644
--- a/setuptools/_distutils/command/check.py
+++ b/setuptools/_distutils/command/check.py
@@ -15,18 +15,26 @@ try:
     from docutils import nodes
 
     class SilentReporter(Reporter):
-
-        def __init__(self, source, report_level, halt_level, stream=None,
-                     debug=0, encoding='ascii', error_handler='replace'):
+        def __init__(
+            self,
+            source,
+            report_level,
+            halt_level,
+            stream=None,
+            debug=0,
+            encoding='ascii',
+            error_handler='replace',
+        ):
             self.messages = []
-            super().__init__(source, report_level, halt_level, stream,
-                             debug, encoding, error_handler)
+            super().__init__(
+                source, report_level, halt_level, stream, debug, encoding, error_handler
+            )
 
         def system_message(self, level, message, *children, **kwargs):
             self.messages.append((level, message, children, kwargs))
-            return nodes.system_message(message, level=level,
-                                        type=self.levels[level],
-                                        *children, **kwargs)
+            return nodes.system_message(
+                message, level=level, type=self.levels[level], *children, **kwargs
+            )
 
     HAS_DOCUTILS = True
 except Exception:
@@ -34,16 +42,23 @@ except Exception:
     # indicate that docutils is not ported to Py3k.
     HAS_DOCUTILS = False
 
+
 class check(Command):
-    """This command checks the meta-data of the package.
- """ - description = ("perform some checks on the package") - user_options = [('metadata', 'm', 'Verify meta-data'), - ('restructuredtext', 'r', - ('Checks if long string meta-data syntax ' - 'are reStructuredText-compliant')), - ('strict', 's', - 'Will exit with an error if a check fails')] + """This command checks the meta-data of the package.""" + + description = "perform some checks on the package" + user_options = [ + ('metadata', 'm', 'Verify meta-data'), + ( + 'restructuredtext', + 'r', + ( + 'Checks if long string meta-data syntax ' + 'are reStructuredText-compliant' + ), + ), + ('strict', 's', 'Will exit with an error if a check fails'), + ] boolean_options = ['metadata', 'restructuredtext', 'strict'] @@ -116,13 +131,15 @@ class check(Command): settings.tab_width = 4 settings.pep_references = None settings.rfc_references = None - reporter = SilentReporter(source_path, - settings.report_level, - settings.halt_level, - stream=settings.warning_stream, - debug=settings.debug, - encoding=settings.error_encoding, - error_handler=settings.error_encoding_error_handler) + reporter = SilentReporter( + source_path, + settings.report_level, + settings.halt_level, + stream=settings.warning_stream, + debug=settings.debug, + encoding=settings.error_encoding, + error_handler=settings.error_encoding_error_handler, + ) document = nodes.document(settings, reporter, source=source_path) document.note_source(source_path, -1) @@ -130,6 +147,7 @@ class check(Command): parser.parse(data, document) except AttributeError as e: reporter.messages.append( - (-1, 'Could not finish the parsing: %s.' % e, '', {})) + (-1, 'Could not finish the parsing: %s.' % e, '', {}) + ) return reporter.messages diff --git a/setuptools/_distutils/command/clean.py b/setuptools/_distutils/command/clean.py index 0cb2701..b731b60 100644 --- a/setuptools/_distutils/command/clean.py +++ b/setuptools/_distutils/command/clean.py @@ -9,22 +9,25 @@ from distutils.core import Command from distutils.dir_util import remove_tree from distutils import log + class clean(Command): description = "clean up temporary files from 'build' command" user_options = [ - ('build-base=', 'b', - "base build directory (default: 'build.build-base')"), - ('build-lib=', None, - "build directory for all modules (default: 'build.build-lib')"), - ('build-temp=', 't', - "temporary build directory (default: 'build.build-temp')"), - ('build-scripts=', None, - "build directory for scripts (default: 'build.build-scripts')"), - ('bdist-base=', None, - "temporary directory for built distributions"), - ('all', 'a', - "remove all build output, not just temporary by-products") + ('build-base=', 'b', "base build directory (default: 'build.build-base')"), + ( + 'build-lib=', + None, + "build directory for all modules (default: 'build.build-lib')", + ), + ('build-temp=', 't', "temporary build directory (default: 'build.build-temp')"), + ( + 'build-scripts=', + None, + "build directory for scripts (default: 'build.build-scripts')", + ), + ('bdist-base=', None, "temporary directory for built distributions"), + ('all', 'a', "remove all build output, not just temporary by-products"), ] boolean_options = ['all'] @@ -38,13 +41,14 @@ class clean(Command): self.all = None def finalize_options(self): - self.set_undefined_options('build', - ('build_base', 'build_base'), - ('build_lib', 'build_lib'), - ('build_scripts', 'build_scripts'), - ('build_temp', 'build_temp')) - self.set_undefined_options('bdist', - ('bdist_base', 'bdist_base')) + self.set_undefined_options( + 'build', + 
('build_base', 'build_base'), + ('build_lib', 'build_lib'), + ('build_scripts', 'build_scripts'), + ('build_temp', 'build_temp'), + ) + self.set_undefined_options('bdist', ('bdist_base', 'bdist_base')) def run(self): # remove the build/temp. directory (unless it's already @@ -52,19 +56,15 @@ class clean(Command): if os.path.exists(self.build_temp): remove_tree(self.build_temp, dry_run=self.dry_run) else: - log.debug("'%s' does not exist -- can't clean it", - self.build_temp) + log.debug("'%s' does not exist -- can't clean it", self.build_temp) if self.all: # remove build directories - for directory in (self.build_lib, - self.bdist_base, - self.build_scripts): + for directory in (self.build_lib, self.bdist_base, self.build_scripts): if os.path.exists(directory): remove_tree(directory, dry_run=self.dry_run) else: - log.warn("'%s' does not exist -- can't clean it", - directory) + log.warn("'%s' does not exist -- can't clean it", directory) # just for the heck of it, try to remove the base build directory: # we might have emptied it right now, but if not we don't care diff --git a/setuptools/_distutils/command/config.py b/setuptools/_distutils/command/config.py index aeda408..73de1d3 100644 --- a/setuptools/_distutils/command/config.py +++ b/setuptools/_distutils/command/config.py @@ -18,32 +18,26 @@ from distutils import log LANG_EXT = {"c": ".c", "c++": ".cxx"} + class config(Command): description = "prepare to build" user_options = [ - ('compiler=', None, - "specify the compiler type"), - ('cc=', None, - "specify the compiler executable"), - ('include-dirs=', 'I', - "list of directories to search for header files"), - ('define=', 'D', - "C preprocessor macros to define"), - ('undef=', 'U', - "C preprocessor macros to undefine"), - ('libraries=', 'l', - "external C libraries to link with"), - ('library-dirs=', 'L', - "directories to search for external C libraries"), - - ('noisy', None, - "show every action (compile, link, run, ...) taken"), - ('dump-source', None, - "dump generated source files before attempting to compile them"), - ] - + ('compiler=', None, "specify the compiler type"), + ('cc=', None, "specify the compiler executable"), + ('include-dirs=', 'I', "list of directories to search for header files"), + ('define=', 'D', "C preprocessor macros to define"), + ('undef=', 'U', "C preprocessor macros to undefine"), + ('libraries=', 'l', "external C libraries to link with"), + ('library-dirs=', 'L', "directories to search for external C libraries"), + ('noisy', None, "show every action (compile, link, run, ...) taken"), + ( + 'dump-source', + None, + "dump generated source files before attempting to compile them", + ), + ] # The three standard command methods: since the "config" command # does nothing by default, these are empty. @@ -93,9 +87,11 @@ class config(Command): # We do this late, and only on-demand, because this is an expensive # import. 
from distutils.ccompiler import CCompiler, new_compiler + if not isinstance(self.compiler, CCompiler): - self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, force=1) + self.compiler = new_compiler( + compiler=self.compiler, dry_run=self.dry_run, force=1 + ) customize_compiler(self.compiler) if self.include_dirs: self.compiler.set_include_dirs(self.include_dirs) @@ -132,14 +128,16 @@ class config(Command): self.compiler.compile([src], include_dirs=include_dirs) return (src, obj) - def _link(self, body, headers, include_dirs, libraries, library_dirs, - lang): + def _link(self, body, headers, include_dirs, libraries, library_dirs, lang): (src, obj) = self._compile(body, headers, include_dirs, lang) prog = os.path.splitext(os.path.basename(src))[0] - self.compiler.link_executable([obj], prog, - libraries=libraries, - library_dirs=library_dirs, - target_lang=lang) + self.compiler.link_executable( + [obj], + prog, + libraries=libraries, + library_dirs=library_dirs, + target_lang=lang, + ) if self.compiler.exe_extension is not None: prog = prog + self.compiler.exe_extension @@ -158,7 +156,6 @@ class config(Command): except OSError: pass - # XXX these ignore the dry-run flag: what to do, what to do? even if # you want a dry-run build, you still need some sort of configuration # info. My inclination is to make it up to the real config command to @@ -177,6 +174,7 @@ class config(Command): ('body' probably isn't of much use, but what the heck.) """ from distutils.ccompiler import CompileError + self._check_compiler() ok = True try: @@ -187,8 +185,7 @@ class config(Command): self._clean() return ok - def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, - lang="c"): + def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, lang="c"): """Construct a source file (just like 'try_cpp()'), run it through the preprocessor, and return true if any line of the output matches 'pattern'. 'pattern' should either be a compiled regex object or a @@ -220,6 +217,7 @@ class config(Command): Return true on success, false otherwise. """ from distutils.ccompiler import CompileError + self._check_compiler() try: self._compile(body, headers, include_dirs, lang) @@ -231,17 +229,24 @@ class config(Command): self._clean() return ok - def try_link(self, body, headers=None, include_dirs=None, libraries=None, - library_dirs=None, lang="c"): + def try_link( + self, + body, + headers=None, + include_dirs=None, + libraries=None, + library_dirs=None, + lang="c", + ): """Try to compile and link a source file, built from 'body' and 'headers', to executable form. Return true on success, false otherwise. """ from distutils.ccompiler import CompileError, LinkError + self._check_compiler() try: - self._link(body, headers, include_dirs, - libraries, library_dirs, lang) + self._link(body, headers, include_dirs, libraries, library_dirs, lang) ok = True except (CompileError, LinkError): ok = False @@ -250,17 +255,26 @@ class config(Command): self._clean() return ok - def try_run(self, body, headers=None, include_dirs=None, libraries=None, - library_dirs=None, lang="c"): + def try_run( + self, + body, + headers=None, + include_dirs=None, + libraries=None, + library_dirs=None, + lang="c", + ): """Try to compile, link to an executable, and run a program built from 'body' and 'headers'. Return true on success, false otherwise. 
""" from distutils.ccompiler import CompileError, LinkError + self._check_compiler() try: - src, obj, exe = self._link(body, headers, include_dirs, - libraries, library_dirs, lang) + src, obj, exe = self._link( + body, headers, include_dirs, libraries, library_dirs, lang + ) self.spawn([exe]) ok = True except (CompileError, LinkError, DistutilsExecError): @@ -270,13 +284,20 @@ class config(Command): self._clean() return ok - # -- High-level methods -------------------------------------------- # (these are the ones that are actually likely to be useful # when implementing a real-world config command!) - def check_func(self, func, headers=None, include_dirs=None, - libraries=None, library_dirs=None, decl=0, call=0): + def check_func( + self, + func, + headers=None, + include_dirs=None, + libraries=None, + library_dirs=None, + decl=0, + call=0, + ): """Determine if function 'func' is available by constructing a source file that refers to 'func', and compiles and links it. If everything succeeds, returns true; otherwise returns false. @@ -302,11 +323,16 @@ class config(Command): body.append("}") body = "\n".join(body) + "\n" - return self.try_link(body, headers, include_dirs, - libraries, library_dirs) + return self.try_link(body, headers, include_dirs, libraries, library_dirs) - def check_lib(self, library, library_dirs=None, headers=None, - include_dirs=None, other_libraries=[]): + def check_lib( + self, + library, + library_dirs=None, + headers=None, + include_dirs=None, + other_libraries=[], + ): """Determine if 'library' is available to be linked against, without actually checking that any particular symbols are provided by it. 'headers' will be used in constructing the source file to @@ -316,17 +342,23 @@ class config(Command): has symbols that depend on other libraries. """ self._check_compiler() - return self.try_link("int main (void) { }", headers, include_dirs, - [library] + other_libraries, library_dirs) - - def check_header(self, header, include_dirs=None, library_dirs=None, - lang="c"): + return self.try_link( + "int main (void) { }", + headers, + include_dirs, + [library] + other_libraries, + library_dirs, + ) + + def check_header(self, header, include_dirs=None, library_dirs=None, lang="c"): """Determine if the system header file named by 'header_file' exists and can be found by the preprocessor; return true if so, false otherwise. """ - return self.try_cpp(body="/* No body */", headers=[header], - include_dirs=include_dirs) + return self.try_cpp( + body="/* No body */", headers=[header], include_dirs=include_dirs + ) + def dump_file(filename, head=None): """Dumps a file content into log.info. diff --git a/setuptools/_distutils/command/install.py b/setuptools/_distutils/command/install.py index 41c17d8..0660406 100644 --- a/setuptools/_distutils/command/install.py +++ b/setuptools/_distutils/command/install.py @@ -21,6 +21,7 @@ from .. 
import _collections from site import USER_BASE from site import USER_SITE + HAS_USER_SITE = True WINDOWS_SCHEME = { @@ -28,7 +29,7 @@ WINDOWS_SCHEME = { 'platlib': '{base}/Lib/site-packages', 'headers': '{base}/Include/{dist_name}', 'scripts': '{base}/Scripts', - 'data' : '{base}', + 'data': '{base}', } INSTALL_SCHEMES = { @@ -37,31 +38,31 @@ INSTALL_SCHEMES = { 'platlib': '{platbase}/{platlibdir}/{implementation_lower}{py_version_short}/site-packages', 'headers': '{base}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}', 'scripts': '{base}/bin', - 'data' : '{base}', - }, + 'data': '{base}', + }, 'posix_home': { 'purelib': '{base}/lib/{implementation_lower}', 'platlib': '{base}/{platlibdir}/{implementation_lower}', 'headers': '{base}/include/{implementation_lower}/{dist_name}', 'scripts': '{base}/bin', - 'data' : '{base}', - }, + 'data': '{base}', + }, 'nt': WINDOWS_SCHEME, 'pypy': { 'purelib': '{base}/site-packages', 'platlib': '{base}/site-packages', 'headers': '{base}/include/{dist_name}', 'scripts': '{base}/bin', - 'data' : '{base}', - }, + 'data': '{base}', + }, 'pypy_nt': { 'purelib': '{base}/site-packages', 'platlib': '{base}/site-packages', 'headers': '{base}/include/{dist_name}', 'scripts': '{base}/Scripts', - 'data' : '{base}', - }, - } + 'data': '{base}', + }, +} # user site schemes if HAS_USER_SITE: @@ -70,17 +71,16 @@ if HAS_USER_SITE: 'platlib': '{usersite}', 'headers': '{userbase}/{implementation}{py_version_nodot_plat}/Include/{dist_name}', 'scripts': '{userbase}/{implementation}{py_version_nodot_plat}/Scripts', - 'data' : '{userbase}', - } + 'data': '{userbase}', + } INSTALL_SCHEMES['posix_user'] = { 'purelib': '{usersite}', 'platlib': '{usersite}', - 'headers': - '{userbase}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}', + 'headers': '{userbase}/include/{implementation_lower}{py_version_short}{abiflags}/{dist_name}', 'scripts': '{userbase}/bin', - 'data' : '{userbase}', - } + 'data': '{userbase}', + } # The keys to an installation scheme; if any new types of files are to be # installed, be sure to add an entry to every installation scheme above, @@ -128,11 +128,7 @@ def _remove_set(ob, attrs): """ Include only attrs that are None in ob. 
""" - return { - key: value - for key, value in attrs.items() - if getattr(ob, key) is None - } + return {key: value for key, value in attrs.items() if getattr(ob, key) is None} def _resolve_scheme(name): @@ -164,10 +160,7 @@ def _inject_headers(name, scheme): def _scheme_attrs(scheme): """Resolve install directories by applying the install schemes.""" - return { - f'install_{key}': scheme[key] - for key in SCHEME_KEYS - } + return {f'install_{key}': scheme[key] for key in SCHEME_KEYS} def _pypy_hack(name): @@ -184,72 +177,73 @@ class install(Command): user_options = [ # Select installation scheme and set base director(y|ies) - ('prefix=', None, - "installation prefix"), - ('exec-prefix=', None, - "(Unix only) prefix for platform-specific files"), - ('home=', None, - "(Unix only) home directory to install under"), - + ('prefix=', None, "installation prefix"), + ('exec-prefix=', None, "(Unix only) prefix for platform-specific files"), + ('home=', None, "(Unix only) home directory to install under"), # Or, just set the base director(y|ies) - ('install-base=', None, - "base installation directory (instead of --prefix or --home)"), - ('install-platbase=', None, - "base installation directory for platform-specific files " + - "(instead of --exec-prefix or --home)"), - ('root=', None, - "install everything relative to this alternate root directory"), - + ( + 'install-base=', + None, + "base installation directory (instead of --prefix or --home)", + ), + ( + 'install-platbase=', + None, + "base installation directory for platform-specific files " + + "(instead of --exec-prefix or --home)", + ), + ('root=', None, "install everything relative to this alternate root directory"), # Or, explicitly set the installation scheme - ('install-purelib=', None, - "installation directory for pure Python module distributions"), - ('install-platlib=', None, - "installation directory for non-pure module distributions"), - ('install-lib=', None, - "installation directory for all module distributions " + - "(overrides --install-purelib and --install-platlib)"), - - ('install-headers=', None, - "installation directory for C/C++ headers"), - ('install-scripts=', None, - "installation directory for Python scripts"), - ('install-data=', None, - "installation directory for data files"), - + ( + 'install-purelib=', + None, + "installation directory for pure Python module distributions", + ), + ( + 'install-platlib=', + None, + "installation directory for non-pure module distributions", + ), + ( + 'install-lib=', + None, + "installation directory for all module distributions " + + "(overrides --install-purelib and --install-platlib)", + ), + ('install-headers=', None, "installation directory for C/C++ headers"), + ('install-scripts=', None, "installation directory for Python scripts"), + ('install-data=', None, "installation directory for data files"), # Byte-compilation options -- see install_lib.py for details, as # these are duplicated from there (but only install_lib does # anything with them). 
('compile', 'c', "compile .py to .pyc [default]"), ('no-compile', None, "don't compile .py files"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - + ( + 'optimize=', + 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]", + ), # Miscellaneous control options - ('force', 'f', - "force installation (overwrite any existing files)"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - + ('force', 'f', "force installation (overwrite any existing files)"), + ('skip-build', None, "skip rebuilding everything (for testing/debugging)"), # Where to install documentation (eventually!) - #('doc-format=', None, "format of documentation to generate"), - #('install-man=', None, "directory for Unix man pages"), - #('install-html=', None, "directory for HTML documentation"), - #('install-info=', None, "directory for GNU info files"), - - ('record=', None, - "filename in which to record list of installed files"), - ] + # ('doc-format=', None, "format of documentation to generate"), + # ('install-man=', None, "directory for Unix man pages"), + # ('install-html=', None, "directory for HTML documentation"), + # ('install-info=', None, "directory for GNU info files"), + ('record=', None, "filename in which to record list of installed files"), + ] boolean_options = ['compile', 'force', 'skip-build'] if HAS_USER_SITE: - user_options.append(('user', None, - "install in user site-package '%s'" % USER_SITE)) + user_options.append( + ('user', None, "install in user site-package '%s'" % USER_SITE) + ) boolean_options.append('user') - negative_opt = {'no-compile' : 'compile'} - + negative_opt = {'no-compile': 'compile'} def initialize_options(self): """Initializes options.""" @@ -271,10 +265,10 @@ class install(Command): # supplied by the user, they are filled in using the installation # scheme implied by prefix/exec-prefix/home and the contents of # that installation scheme. - self.install_purelib = None # for pure module distributions - self.install_platlib = None # non-pure (dists w/ extensions) - self.install_headers = None # for C/C++ headers - self.install_lib = None # set to either purelib or platlib + self.install_purelib = None # for pure module distributions + self.install_platlib = None # non-pure (dists w/ extensions) + self.install_headers = None # for C/C++ headers + self.install_lib = None # set to either purelib or platlib self.install_scripts = None self.install_data = None self.install_userbase = USER_BASE @@ -316,13 +310,12 @@ class install(Command): # Not defined yet because we don't know anything about # documentation yet. - #self.install_man = None - #self.install_html = None - #self.install_info = None + # self.install_man = None + # self.install_html = None + # self.install_info = None self.record = None - # -- Option finalizing methods ------------------------------------- # (This is rather more involved than for most commands, # because this is where the policy for installing third- @@ -345,20 +338,30 @@ class install(Command): # Check for errors/inconsistencies in the options; first, stuff # that's wrong on any platform. 
- if ((self.prefix or self.exec_prefix or self.home) and - (self.install_base or self.install_platbase)): + if (self.prefix or self.exec_prefix or self.home) and ( + self.install_base or self.install_platbase + ): raise DistutilsOptionError( - "must supply either prefix/exec-prefix/home or " + - "install-base/install-platbase -- not both") + "must supply either prefix/exec-prefix/home or " + + "install-base/install-platbase -- not both" + ) if self.home and (self.prefix or self.exec_prefix): raise DistutilsOptionError( - "must supply either home or prefix/exec-prefix -- not both") + "must supply either home or prefix/exec-prefix -- not both" + ) - if self.user and (self.prefix or self.exec_prefix or self.home or - self.install_base or self.install_platbase): - raise DistutilsOptionError("can't combine user with prefix, " - "exec_prefix/home, or install_(plat)base") + if self.user and ( + self.prefix + or self.exec_prefix + or self.home + or self.install_base + or self.install_platbase + ): + raise DistutilsOptionError( + "can't combine user with prefix, " + "exec_prefix/home, or install_(plat)base" + ) # Next, stuff that's wrong (or dubious) only on certain platforms. if os.name != "posix": @@ -423,7 +426,8 @@ class install(Command): local_vars['usersite'] = self.install_usersite self.config_vars = _collections.DictStack( - [compat_vars, sysconfig.get_config_vars(), local_vars]) + [compat_vars, sysconfig.get_config_vars(), local_vars] + ) self.expand_basedirs() @@ -436,6 +440,7 @@ class install(Command): if DEBUG: from pprint import pprint + print("config vars:") pprint(dict(self.config_vars)) @@ -454,17 +459,23 @@ class install(Command): # module distribution is pure or not. Of course, if the user # already specified install_lib, use their selection. if self.install_lib is None: - if self.distribution.has_ext_modules(): # has extensions: non-pure + if self.distribution.has_ext_modules(): # has extensions: non-pure self.install_lib = self.install_platlib else: self.install_lib = self.install_purelib - # Convert directories from Unix /-separated syntax to the local # convention. - self.convert_paths('lib', 'purelib', 'platlib', - 'scripts', 'data', 'headers', - 'userbase', 'usersite') + self.convert_paths( + 'lib', + 'purelib', + 'platlib', + 'scripts', + 'data', + 'headers', + 'userbase', + 'usersite', + ) # Deprecated # Well, we're not actually fully completely finalized yet: we still @@ -472,21 +483,22 @@ class install(Command): # non-packagized module distributions (hello, Numerical Python!) to # get their own directories. self.handle_extra_path() - self.install_libbase = self.install_lib # needed for .pth file + self.install_libbase = self.install_lib # needed for .pth file self.install_lib = os.path.join(self.install_lib, self.extra_dirs) # If a new root directory was supplied, make all the installation # dirs relative to it. if self.root is not None: - self.change_roots('libbase', 'lib', 'purelib', 'platlib', - 'scripts', 'data', 'headers') + self.change_roots( + 'libbase', 'lib', 'purelib', 'platlib', 'scripts', 'data', 'headers' + ) self.dump_dirs("after prepending root") # Find out the build directories, ie. where to install from. - self.set_undefined_options('build', - ('build_base', 'build_base'), - ('build_lib', 'build_lib')) + self.set_undefined_options( + 'build', ('build_base', 'build_base'), ('build_lib', 'build_lib') + ) # Punt on doc directories for now -- after all, we're punting on # documentation completely! 
@@ -496,6 +508,7 @@ class install(Command): if not DEBUG: return from distutils.fancy_getopt import longopt_xlate + log.debug(msg + ":") for opt in self.user_options: opt_name = opt[0] @@ -515,24 +528,24 @@ class install(Command): if self.install_base is not None or self.install_platbase is not None: incomplete_scheme = ( ( - self.install_lib is None and - self.install_purelib is None and - self.install_platlib is None - ) or - self.install_headers is None or - self.install_scripts is None or - self.install_data is None + self.install_lib is None + and self.install_purelib is None + and self.install_platlib is None + ) + or self.install_headers is None + or self.install_scripts is None + or self.install_data is None ) if incomplete_scheme: raise DistutilsOptionError( - "install-base or install-platbase supplied, but " - "installation scheme is incomplete") + "install-base or install-platbase supplied, but " + "installation scheme is incomplete" + ) return if self.user: if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") + raise DistutilsPlatformError("User base directory is not specified") self.install_base = self.install_platbase = self.install_userbase self.select_scheme("posix_user") elif self.home is not None: @@ -542,15 +555,14 @@ class install(Command): if self.prefix is None: if self.exec_prefix is not None: raise DistutilsOptionError( - "must not supply exec-prefix without prefix") + "must not supply exec-prefix without prefix" + ) # Allow Fedora to add components to the prefix _prefix_addition = getattr(sysconfig, '_prefix_addition', "") - self.prefix = ( - os.path.normpath(sys.prefix) + _prefix_addition) - self.exec_prefix = ( - os.path.normpath(sys.exec_prefix) + _prefix_addition) + self.prefix = os.path.normpath(sys.prefix) + _prefix_addition + self.exec_prefix = os.path.normpath(sys.exec_prefix) + _prefix_addition else: if self.exec_prefix is None: @@ -564,8 +576,7 @@ class install(Command): """Finalizes options for non-posix platforms""" if self.user: if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") + raise DistutilsPlatformError("User base directory is not specified") self.install_base = self.install_platbase = self.install_userbase self.select_scheme(os.name + "_user") elif self.home is not None: @@ -580,7 +591,8 @@ class install(Command): self.select_scheme(os.name) except KeyError: raise DistutilsPlatformError( - "I don't know how to install stuff on '%s'" % os.name) + "I don't know how to install stuff on '%s'" % os.name + ) def select_scheme(self, name): _select_scheme(self, name) @@ -601,9 +613,16 @@ class install(Command): def expand_dirs(self): """Calls `os.path.expanduser` on install dirs.""" - self._expand_attrs(['install_purelib', 'install_platlib', - 'install_lib', 'install_headers', - 'install_scripts', 'install_data',]) + self._expand_attrs( + [ + 'install_purelib', + 'install_platlib', + 'install_lib', + 'install_headers', + 'install_scripts', + 'install_data', + ] + ) def convert_paths(self, *names): """Call `convert_path` over `names`.""" @@ -630,8 +649,9 @@ class install(Command): path_file, extra_dirs = self.extra_path else: raise DistutilsOptionError( - "'extra_path' option must be a list, tuple, or " - "comma-separated string with 1 or 2 elements") + "'extra_path' option must be a list, tuple, or " + "comma-separated string with 1 or 2 elements" + ) # convert to local form in case Unix notation used (as it # should be in setup scripts) @@ 
-674,8 +694,7 @@ class install(Command): # internally, and not to sys.path, so we don't check the platform # matches what we are running. if self.warn_dir and build_plat != get_platform(): - raise DistutilsPlatformError("Can't install when " - "cross-compiling") + raise DistutilsPlatformError("Can't install when " "cross-compiling") # Run all sub-commands (at least those that need to be run) for cmd_name in self.get_sub_commands(): @@ -687,38 +706,43 @@ class install(Command): # write list of installed files, if requested. if self.record: outputs = self.get_outputs() - if self.root: # strip any package prefix + if self.root: # strip any package prefix root_len = len(self.root) for counter in range(len(outputs)): outputs[counter] = outputs[counter][root_len:] - self.execute(write_file, - (self.record, outputs), - "writing list of installed files to '%s'" % - self.record) + self.execute( + write_file, + (self.record, outputs), + "writing list of installed files to '%s'" % self.record, + ) sys_path = map(os.path.normpath, sys.path) sys_path = map(os.path.normcase, sys_path) install_lib = os.path.normcase(os.path.normpath(self.install_lib)) - if (self.warn_dir and - not (self.path_file and self.install_path_file) and - install_lib not in sys_path): - log.debug(("modules installed to '%s', which is not in " - "Python's module search path (sys.path) -- " - "you'll have to change the search path yourself"), - self.install_lib) + if ( + self.warn_dir + and not (self.path_file and self.install_path_file) + and install_lib not in sys_path + ): + log.debug( + ( + "modules installed to '%s', which is not in " + "Python's module search path (sys.path) -- " + "you'll have to change the search path yourself" + ), + self.install_lib, + ) def create_path_file(self): """Creates the .pth file""" - filename = os.path.join(self.install_libbase, - self.path_file + ".pth") + filename = os.path.join(self.install_libbase, self.path_file + ".pth") if self.install_path_file: - self.execute(write_file, - (filename, [self.extra_dirs]), - "creating %s" % filename) + self.execute( + write_file, (filename, [self.extra_dirs]), "creating %s" % filename + ) else: self.warn("path file '%s' not created" % filename) - # -- Reporting methods --------------------------------------------- def get_outputs(self): @@ -733,8 +757,7 @@ class install(Command): outputs.append(filename) if self.path_file and self.install_path_file: - outputs.append(os.path.join(self.install_libbase, - self.path_file + ".pth")) + outputs.append(os.path.join(self.install_libbase, self.path_file + ".pth")) return outputs @@ -753,8 +776,9 @@ class install(Command): def has_lib(self): """Returns true if the current distribution has any Python modules to install.""" - return (self.distribution.has_pure_modules() or - self.distribution.has_ext_modules()) + return ( + self.distribution.has_pure_modules() or self.distribution.has_ext_modules() + ) def has_headers(self): """Returns true if the current distribution has any headers to @@ -773,9 +797,10 @@ class install(Command): # 'sub_commands': a list of commands this command might have to run to # get its work done. See cmd.py for more info. 
- sub_commands = [('install_lib', has_lib), - ('install_headers', has_headers), - ('install_scripts', has_scripts), - ('install_data', has_data), - ('install_egg_info', lambda self:True), - ] + sub_commands = [ + ('install_lib', has_lib), + ('install_headers', has_headers), + ('install_scripts', has_scripts), + ('install_data', has_data), + ('install_egg_info', lambda self: True), + ] diff --git a/setuptools/_distutils/command/install_data.py b/setuptools/_distutils/command/install_data.py index 947cd76..23d91ad 100644 --- a/setuptools/_distutils/command/install_data.py +++ b/setuptools/_distutils/command/install_data.py @@ -9,18 +9,21 @@ import os from distutils.core import Command from distutils.util import change_root, convert_path + class install_data(Command): description = "install data files" user_options = [ - ('install-dir=', 'd', - "base directory for installing data files " - "(default: installation base dir)"), - ('root=', None, - "install everything relative to this alternate root directory"), + ( + 'install-dir=', + 'd', + "base directory for installing data files " + "(default: installation base dir)", + ), + ('root=', None, "install everything relative to this alternate root directory"), ('force', 'f', "force installation (overwrite existing files)"), - ] + ] boolean_options = ['force'] @@ -33,11 +36,12 @@ class install_data(Command): self.warn_dir = 1 def finalize_options(self): - self.set_undefined_options('install', - ('install_data', 'install_dir'), - ('root', 'root'), - ('force', 'force'), - ) + self.set_undefined_options( + 'install', + ('install_data', 'install_dir'), + ('root', 'root'), + ('force', 'force'), + ) def run(self): self.mkpath(self.install_dir) @@ -46,9 +50,10 @@ class install_data(Command): # it's a simple file, so copy it f = convert_path(f) if self.warn_dir: - self.warn("setup script did not provide a directory for " - "'%s' -- installing right in '%s'" % - (f, self.install_dir)) + self.warn( + "setup script did not provide a directory for " + "'%s' -- installing right in '%s'" % (f, self.install_dir) + ) (out, _) = self.copy_file(f, self.install_dir) self.outfiles.append(out) else: diff --git a/setuptools/_distutils/command/install_egg_info.py b/setuptools/_distutils/command/install_egg_info.py index adc0323..dc93963 100644 --- a/setuptools/_distutils/command/install_egg_info.py +++ b/setuptools/_distutils/command/install_egg_info.py @@ -8,6 +8,7 @@ from distutils.cmd import Command from distutils import log, dir_util import os, sys, re + class install_egg_info(Command): """Install an .egg-info file for the package""" @@ -28,11 +29,11 @@ class install_egg_info(Command): return "%s-%s-py%d.%d.egg-info" % ( to_filename(safe_name(self.distribution.get_name())), to_filename(safe_version(self.distribution.get_version())), - *sys.version_info[:2] + *sys.version_info[:2], ) def finalize_options(self): - self.set_undefined_options('install_lib',('install_dir','install_dir')) + self.set_undefined_options('install_lib', ('install_dir', 'install_dir')) self.target = os.path.join(self.install_dir, self.basename) self.outputs = [self.target] @@ -41,10 +42,11 @@ class install_egg_info(Command): if os.path.isdir(target) and not os.path.islink(target): dir_util.remove_tree(target, dry_run=self.dry_run) elif os.path.exists(target): - self.execute(os.unlink,(self.target,),"Removing "+target) + self.execute(os.unlink, (self.target,), "Removing " + target) elif not os.path.isdir(self.install_dir): - self.execute(os.makedirs, (self.install_dir,), - "Creating 
"+self.install_dir) + self.execute( + os.makedirs, (self.install_dir,), "Creating " + self.install_dir + ) log.info("Writing %s", target) if not self.dry_run: with open(target, 'w', encoding='UTF-8') as f: @@ -58,6 +60,7 @@ class install_egg_info(Command): # can be replaced by importing them from pkg_resources once it is included # in the stdlib. + def safe_name(name): """Convert an arbitrary string to a standard distribution name @@ -72,7 +75,7 @@ def safe_version(version): Spaces become dots, and all other non-alphanumeric characters become dashes, with runs of multiple dashes condensed to a single dash. """ - version = version.replace(' ','.') + version = version.replace(' ', '.') return re.sub('[^A-Za-z0-9.]+', '-', version) @@ -81,4 +84,4 @@ def to_filename(name): Any '-' characters are currently replaced with '_'. """ - return name.replace('-','_') + return name.replace('-', '_') diff --git a/setuptools/_distutils/command/install_headers.py b/setuptools/_distutils/command/install_headers.py index 9bb0b18..87046ab 100644 --- a/setuptools/_distutils/command/install_headers.py +++ b/setuptools/_distutils/command/install_headers.py @@ -11,11 +11,10 @@ class install_headers(Command): description = "install C/C++ header files" - user_options = [('install-dir=', 'd', - "directory to install header files to"), - ('force', 'f', - "force installation (overwrite existing files)"), - ] + user_options = [ + ('install-dir=', 'd', "directory to install header files to"), + ('force', 'f', "force installation (overwrite existing files)"), + ] boolean_options = ['force'] @@ -25,10 +24,9 @@ class install_headers(Command): self.outfiles = [] def finalize_options(self): - self.set_undefined_options('install', - ('install_headers', 'install_dir'), - ('force', 'force')) - + self.set_undefined_options( + 'install', ('install_headers', 'install_dir'), ('force', 'force') + ) def run(self): headers = self.distribution.headers diff --git a/setuptools/_distutils/command/install_lib.py b/setuptools/_distutils/command/install_lib.py index 6154cf0..ad3089c 100644 --- a/setuptools/_distutils/command/install_lib.py +++ b/setuptools/_distutils/command/install_lib.py @@ -14,6 +14,7 @@ from distutils.errors import DistutilsOptionError # Extension for Python source files. 
PYTHON_SOURCE_EXTENSION = ".py" + class install_lib(Command): description = "install all Python modules (extensions and pure Python)" @@ -35,18 +36,21 @@ class install_lib(Command): user_options = [ ('install-dir=', 'd', "directory to install to"), - ('build-dir=','b', "build directory (where to install from)"), + ('build-dir=', 'b', "build directory (where to install from)"), ('force', 'f', "force installation (overwrite existing files)"), ('compile', 'c', "compile .py to .pyc [default]"), ('no-compile', None, "don't compile .py files"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ( + 'optimize=', + 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]", + ), ('skip-build', None, "skip the build steps"), - ] + ] boolean_options = ['force', 'compile', 'skip-build'] - negative_opt = {'no-compile' : 'compile'} + negative_opt = {'no-compile': 'compile'} def initialize_options(self): # let the 'install' command dictate our installation directory @@ -61,14 +65,15 @@ class install_lib(Command): # Get all the information we need to install pure Python modules # from the umbrella 'install' command -- build (source) directory, # install (target) directory, and whether to compile .py files. - self.set_undefined_options('install', - ('build_lib', 'build_dir'), - ('install_lib', 'install_dir'), - ('force', 'force'), - ('compile', 'compile'), - ('optimize', 'optimize'), - ('skip_build', 'skip_build'), - ) + self.set_undefined_options( + 'install', + ('build_lib', 'build_dir'), + ('install_lib', 'install_dir'), + ('force', 'force'), + ('compile', 'compile'), + ('optimize', 'optimize'), + ('skip_build', 'skip_build'), + ) if self.compile is None: self.compile = True @@ -110,8 +115,9 @@ class install_lib(Command): if os.path.isdir(self.build_dir): outfiles = self.copy_tree(self.build_dir, self.install_dir) else: - self.warn("'%s' does not exist -- no Python modules to install" % - self.build_dir) + self.warn( + "'%s' does not exist -- no Python modules to install" % self.build_dir + ) return return outfiles @@ -129,14 +135,22 @@ class install_lib(Command): install_root = self.get_finalized_command('install').root if self.compile: - byte_compile(files, optimize=0, - force=self.force, prefix=install_root, - dry_run=self.dry_run) + byte_compile( + files, + optimize=0, + force=self.force, + prefix=install_root, + dry_run=self.dry_run, + ) if self.optimize > 0: - byte_compile(files, optimize=self.optimize, - force=self.force, prefix=install_root, - verbose=self.verbose, dry_run=self.dry_run) - + byte_compile( + files, + optimize=self.optimize, + force=self.force, + prefix=install_root, + verbose=self.verbose, + dry_run=self.dry_run, + ) # -- Utility methods ----------------------------------------------- @@ -165,15 +179,18 @@ class install_lib(Command): if ext != PYTHON_SOURCE_EXTENSION: continue if self.compile: - bytecode_files.append(importlib.util.cache_from_source( - py_file, optimization='')) + bytecode_files.append( + importlib.util.cache_from_source(py_file, optimization='') + ) if self.optimize > 0: - bytecode_files.append(importlib.util.cache_from_source( - py_file, optimization=self.optimize)) + bytecode_files.append( + importlib.util.cache_from_source( + py_file, optimization=self.optimize + ) + ) return bytecode_files - # -- External interface -------------------------------------------- # (called by outsiders) @@ -182,19 
+199,23 @@ class install_lib(Command): were actually run. Not affected by the "dry-run" flag or whether modules have actually been built yet. """ - pure_outputs = \ - self._mutate_outputs(self.distribution.has_pure_modules(), - 'build_py', 'build_lib', - self.install_dir) + pure_outputs = self._mutate_outputs( + self.distribution.has_pure_modules(), + 'build_py', + 'build_lib', + self.install_dir, + ) if self.compile: bytecode_outputs = self._bytecode_filenames(pure_outputs) else: bytecode_outputs = [] - ext_outputs = \ - self._mutate_outputs(self.distribution.has_ext_modules(), - 'build_ext', 'build_lib', - self.install_dir) + ext_outputs = self._mutate_outputs( + self.distribution.has_ext_modules(), + 'build_ext', + 'build_lib', + self.install_dir, + ) return pure_outputs + bytecode_outputs + ext_outputs diff --git a/setuptools/_distutils/command/install_scripts.py b/setuptools/_distutils/command/install_scripts.py index 31a1130..f09bd64 100644 --- a/setuptools/_distutils/command/install_scripts.py +++ b/setuptools/_distutils/command/install_scripts.py @@ -17,7 +17,7 @@ class install_scripts(Command): user_options = [ ('install-dir=', 'd', "directory to install scripts to"), - ('build-dir=','b', "build directory (where to install from)"), + ('build-dir=', 'b', "build directory (where to install from)"), ('force', 'f', "force installation (overwrite existing files)"), ('skip-build', None, "skip the build steps"), ] @@ -32,11 +32,12 @@ class install_scripts(Command): def finalize_options(self): self.set_undefined_options('build', ('build_scripts', 'build_dir')) - self.set_undefined_options('install', - ('install_scripts', 'install_dir'), - ('force', 'force'), - ('skip_build', 'skip_build'), - ) + self.set_undefined_options( + 'install', + ('install_scripts', 'install_dir'), + ('force', 'force'), + ('skip_build', 'skip_build'), + ) def run(self): if not self.skip_build: diff --git a/setuptools/_distutils/command/py37compat.py b/setuptools/_distutils/command/py37compat.py index 754715a..aa0c0a7 100644 --- a/setuptools/_distutils/command/py37compat.py +++ b/setuptools/_distutils/command/py37compat.py @@ -7,12 +7,13 @@ def _pythonlib_compat(): library. See pypa/distutils#9. 
""" from distutils import sysconfig + if not sysconfig.get_config_var('Py_ENABLED_SHARED'): return yield 'python{}.{}{}'.format( sys.hexversion >> 24, - (sys.hexversion >> 16) & 0xff, + (sys.hexversion >> 16) & 0xFF, sysconfig.get_config_var('ABIFLAGS'), ) diff --git a/setuptools/_distutils/command/register.py b/setuptools/_distutils/command/register.py index 0fac94e..ca407eb 100644 --- a/setuptools/_distutils/command/register.py +++ b/setuptools/_distutils/command/register.py @@ -14,17 +14,23 @@ from distutils.core import PyPIRCCommand from distutils.errors import * from distutils import log + class register(PyPIRCCommand): - description = ("register the distribution with the Python package index") + description = "register the distribution with the Python package index" user_options = PyPIRCCommand.user_options + [ - ('list-classifiers', None, - 'list the valid Trove classifiers'), - ('strict', None , - 'Will stop the registering if the meta-data are not fully compliant') - ] + ('list-classifiers', None, 'list the valid Trove classifiers'), + ( + 'strict', + None, + 'Will stop the registering if the meta-data are not fully compliant', + ), + ] boolean_options = PyPIRCCommand.boolean_options + [ - 'verify', 'list-classifiers', 'strict'] + 'verify', + 'list-classifiers', + 'strict', + ] sub_commands = [('check', lambda self: True)] @@ -36,8 +42,10 @@ class register(PyPIRCCommand): def finalize_options(self): PyPIRCCommand.finalize_options(self) # setting options for the `check` subcommand - check_options = {'strict': ('register', self.strict), - 'restructuredtext': ('register', 1)} + check_options = { + 'strict': ('register', self.strict), + 'restructuredtext': ('register', 1), + } self.distribution.command_options['check'] = check_options def run(self): @@ -57,8 +65,11 @@ class register(PyPIRCCommand): def check_metadata(self): """Deprecated API.""" - warn("distutils.command.register.check_metadata is deprecated, \ - use the check command instead", PendingDeprecationWarning) + warn( + "distutils.command.register.check_metadata is deprecated, \ + use the check command instead", + PendingDeprecationWarning, + ) check = self.distribution.get_command_obj('check') check.ensure_finalized() check.strict = self.strict @@ -66,8 +77,7 @@ class register(PyPIRCCommand): check.run() def _set_config(self): - ''' Reads the configuration file and set attributes. - ''' + '''Reads the configuration file and set attributes.''' config = self._read_pypirc() if config != {}: self.username = config['username'] @@ -83,45 +93,43 @@ class register(PyPIRCCommand): self.has_config = False def classifiers(self): - ''' Fetch the list of classifiers from the server. - ''' - url = self.repository+'?:action=list_classifiers' + '''Fetch the list of classifiers from the server.''' + url = self.repository + '?:action=list_classifiers' response = urllib.request.urlopen(url) log.info(self._read_pypi_response(response)) def verify_metadata(self): - ''' Send the metadata to the package index server to be checked. - ''' + '''Send the metadata to the package index server to be checked.''' # send the info to the server and report the result (code, result) = self.post_to_server(self.build_post_data('verify')) log.info('Server response (%s): %s', code, result) def send_metadata(self): - ''' Send the metadata to the package index server. + '''Send the metadata to the package index server. - Well, do the following: - 1. figure who the user is, and then - 2. send the data as a Basic auth'ed POST. + Well, do the following: + 1. 
figure who the user is, and then + 2. send the data as a Basic auth'ed POST. - First we try to read the username/password from $HOME/.pypirc, - which is a ConfigParser-formatted file with a section - [distutils] containing username and password entries (both - in clear text). Eg: + First we try to read the username/password from $HOME/.pypirc, + which is a ConfigParser-formatted file with a section + [distutils] containing username and password entries (both + in clear text). Eg: - [distutils] - index-servers = - pypi + [distutils] + index-servers = + pypi - [pypi] - username: fred - password: sekrit + [pypi] + username: fred + password: sekrit - Otherwise, to figure who the user is, we offer the user three - choices: + Otherwise, to figure who the user is, we offer the user three + choices: - 1. use existing login, - 2. register as a new user, or - 3. set the password to a random string and email the user. + 1. use existing login, + 2. register as a new user, or + 3. set the password to a random string and email the user. ''' # see if we can short-cut and get the username/password from the @@ -137,13 +145,16 @@ class register(PyPIRCCommand): # get the user's login info choices = '1 2 3 4'.split() while choice not in choices: - self.announce('''\ + self.announce( + '''\ We need to know who you are, so please choose either: 1. use your existing login, 2. register as a new user, 3. have the server generate a new password for you (and email it to you), or 4. quit -Your selection [default 1]: ''', log.INFO) +Your selection [default 1]: ''', + log.INFO, + ) choice = input() if not choice: choice = '1' @@ -162,10 +173,8 @@ Your selection [default 1]: ''', log.INFO) host = urllib.parse.urlparse(self.repository)[1] auth.add_password(self.realm, host, username, password) # send the info to the server and report the result - code, result = self.post_to_server(self.build_post_data('submit'), - auth) - self.announce('Server response (%s): %s' % (code, result), - log.INFO) + code, result = self.post_to_server(self.build_post_data('submit'), auth) + self.announce('Server response (%s): %s' % (code, result), log.INFO) # possibly save the login if code == 200: @@ -174,10 +183,17 @@ Your selection [default 1]: ''', log.INFO) # so the upload command can reuse it self.distribution.password = password else: - self.announce(('I can store your PyPI login so future ' - 'submissions will be faster.'), log.INFO) - self.announce('(the login will be stored in %s)' % \ - self._get_rc_file(), log.INFO) + self.announce( + ( + 'I can store your PyPI login so future ' + 'submissions will be faster.' 
+ ), + log.INFO, + ) + self.announce( + '(the login will be stored in %s)' % self._get_rc_file(), + log.INFO, + ) choice = 'X' while choice.lower() not in 'yn': choice = input('Save your login (y/N)?') @@ -208,8 +224,7 @@ Your selection [default 1]: ''', log.INFO) log.info('Server response (%s): %s', code, result) else: log.info('You will receive an email shortly.') - log.info(('Follow the instructions in it to ' - 'complete registration.')) + log.info(('Follow the instructions in it to ' 'complete registration.')) elif choice == '3': data = {':action': 'password_reset'} data['email'] = '' @@ -224,7 +239,7 @@ Your selection [default 1]: ''', log.INFO) meta = self.distribution.metadata data = { ':action': action, - 'metadata_version' : '1.0', + 'metadata_version': '1.0', 'name': meta.get_name(), 'version': meta.get_version(), 'summary': meta.get_description(), @@ -247,12 +262,11 @@ Your selection [default 1]: ''', log.INFO) return data def post_to_server(self, data, auth=None): - ''' Post a query to the server, and return a string response. - ''' + '''Post a query to the server, and return a string response.''' if 'name' in data: - self.announce('Registering %s to %s' % (data['name'], - self.repository), - log.INFO) + self.announce( + 'Registering %s to %s' % (data['name'], self.repository), log.INFO + ) # Build up the MIME payload for the urllib2 POST data boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' sep_boundary = '\n--' + boundary @@ -260,12 +274,12 @@ Your selection [default 1]: ''', log.INFO) body = io.StringIO() for key, value in data.items(): # handle multiple entries for the same name - if type(value) not in (type([]), type( () )): + if type(value) not in (type([]), type(())): value = [value] for value in value: value = str(value) body.write(sep_boundary) - body.write('\nContent-Disposition: form-data; name="%s"'%key) + body.write('\nContent-Disposition: form-data; name="%s"' % key) body.write("\n\n") body.write(value) if value and value[-1] == '\r': @@ -276,8 +290,9 @@ Your selection [default 1]: ''', log.INFO) # build the Request headers = { - 'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary, - 'Content-length': str(len(body)) + 'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8' + % boundary, + 'Content-length': str(len(body)), } req = urllib.request.Request(self.repository, body, headers) diff --git a/setuptools/_distutils/command/sdist.py b/setuptools/_distutils/command/sdist.py index b4996fc..aad3e71 100644 --- a/setuptools/_distutils/command/sdist.py +++ b/setuptools/_distutils/command/sdist.py @@ -24,13 +24,12 @@ def show_formats(): """ from distutils.fancy_getopt import FancyGetopt from distutils.archive_util import ARCHIVE_FORMATS + formats = [] for format in ARCHIVE_FORMATS.keys(): - formats.append(("formats=" + format, None, - ARCHIVE_FORMATS[format][2])) + formats.append(("formats=" + format, None, ARCHIVE_FORMATS[format][2])) formats.sort() - FancyGetopt(formats).print_help( - "List of available source distribution formats:") + FancyGetopt(formats).print_help("List of available source distribution formats:") class sdist(Command): @@ -44,55 +43,77 @@ class sdist(Command): return self.metadata_check user_options = [ - ('template=', 't', - "name of manifest template file [default: MANIFEST.in]"), - ('manifest=', 'm', - "name of manifest file [default: MANIFEST]"), - ('use-defaults', None, - "include the default file set in the manifest " - "[default; disable with --no-defaults]"), - ('no-defaults', None, - "don't 
include the default file set"), - ('prune', None, - "specifically exclude files/directories that should not be " - "distributed (build tree, RCS/CVS dirs, etc.) " - "[default; disable with --no-prune]"), - ('no-prune', None, - "don't automatically exclude anything"), - ('manifest-only', 'o', - "just regenerate the manifest and then stop " - "(implies --force-manifest)"), - ('force-manifest', 'f', - "forcibly regenerate the manifest and carry on as usual. " - "Deprecated: now the manifest is always regenerated."), - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ('metadata-check', None, - "Ensure that all required elements of meta-data " - "are supplied. Warn if any missing. [default]"), - ('owner=', 'u', - "Owner name used when creating a tar file [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file [default: current group]"), - ] - - boolean_options = ['use-defaults', 'prune', - 'manifest-only', 'force-manifest', - 'keep-temp', 'metadata-check'] + ('template=', 't', "name of manifest template file [default: MANIFEST.in]"), + ('manifest=', 'm', "name of manifest file [default: MANIFEST]"), + ( + 'use-defaults', + None, + "include the default file set in the manifest " + "[default; disable with --no-defaults]", + ), + ('no-defaults', None, "don't include the default file set"), + ( + 'prune', + None, + "specifically exclude files/directories that should not be " + "distributed (build tree, RCS/CVS dirs, etc.) " + "[default; disable with --no-prune]", + ), + ('no-prune', None, "don't automatically exclude anything"), + ( + 'manifest-only', + 'o', + "just regenerate the manifest and then stop " "(implies --force-manifest)", + ), + ( + 'force-manifest', + 'f', + "forcibly regenerate the manifest and carry on as usual. " + "Deprecated: now the manifest is always regenerated.", + ), + ('formats=', None, "formats for source distribution (comma-separated list)"), + ( + 'keep-temp', + 'k', + "keep the distribution tree around after creating " + "archive file(s)", + ), + ( + 'dist-dir=', + 'd', + "directory to put the source distribution archive(s) in " "[default: dist]", + ), + ( + 'metadata-check', + None, + "Ensure that all required elements of meta-data " + "are supplied. Warn if any missing. 
[default]", + ), + ( + 'owner=', + 'u', + "Owner name used when creating a tar file [default: current user]", + ), + ( + 'group=', + 'g', + "Group name used when creating a tar file [default: current group]", + ), + ] + + boolean_options = [ + 'use-defaults', + 'prune', + 'manifest-only', + 'force-manifest', + 'keep-temp', + 'metadata-check', + ] help_options = [ - ('help-formats', None, - "list available distribution formats", show_formats), - ] + ('help-formats', None, "list available distribution formats", show_formats), + ] - negative_opt = {'no-defaults': 'use-defaults', - 'no-prune': 'prune' } + negative_opt = {'no-defaults': 'use-defaults', 'no-prune': 'prune'} sub_commands = [('check', checking_metadata)] @@ -131,8 +152,7 @@ class sdist(Command): bad_format = archive_util.check_archive_formats(self.formats) if bad_format: - raise DistutilsOptionError( - "unknown archive format '%s'" % bad_format) + raise DistutilsOptionError("unknown archive format '%s'" % bad_format) if self.dist_dir is None: self.dist_dir = "dist" @@ -161,8 +181,11 @@ class sdist(Command): def check_metadata(self): """Deprecated API.""" - warn("distutils.command.sdist.check_metadata is deprecated, \ - use the check command instead", PendingDeprecationWarning) + warn( + "distutils.command.sdist.check_metadata is deprecated, \ + use the check command instead", + PendingDeprecationWarning, + ) check = self.distribution.get_command_obj('check') check.ensure_finalized() check.run() @@ -189,9 +212,10 @@ class sdist(Command): return if not template_exists: - self.warn(("manifest template '%s' does not exist " + - "(using default file list)") % - self.template) + self.warn( + ("manifest template '%s' does not exist " + "(using default file list)") + % self.template + ) self.filelist.findall() if self.use_defaults: @@ -259,8 +283,9 @@ class sdist(Command): break if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) + self.warn( + "standard file not found: should have one of " + ', '.join(alts) + ) else: if self._cs_path_exists(fn): self.filelist.append(fn) @@ -328,14 +353,20 @@ class sdist(Command): 'self.filelist', which updates itself accordingly. """ log.info("reading manifest template '%s'", self.template) - template = TextFile(self.template, strip_comments=1, skip_blanks=1, - join_lines=1, lstrip_ws=1, rstrip_ws=1, - collapse_join=1) + template = TextFile( + self.template, + strip_comments=1, + skip_blanks=1, + join_lines=1, + lstrip_ws=1, + rstrip_ws=1, + collapse_join=1, + ) try: while True: line = template.readline() - if line is None: # end of file + if line is None: # end of file break try: @@ -344,9 +375,10 @@ class sdist(Command): # malformed lines, or a ValueError from the lower-level # convert_path function except (DistutilsTemplateError, ValueError) as msg: - self.warn("%s, line %d: %s" % (template.filename, - template.current_line, - msg)) + self.warn( + "%s, line %d: %s" + % (template.filename, template.current_line, msg) + ) finally: template.close() @@ -369,8 +401,7 @@ class sdist(Command): else: seps = '/' - vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', - '_darcs'] + vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs'] vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps) self.filelist.exclude_pattern(vcs_ptrn, is_regex=1) @@ -380,14 +411,19 @@ class sdist(Command): named by 'self.manifest'. 
""" if self._manifest_is_not_generated(): - log.info("not writing to manually maintained " - "manifest file '%s'" % self.manifest) + log.info( + "not writing to manually maintained " + "manifest file '%s'" % self.manifest + ) return content = self.filelist.files[:] content.insert(0, '# file GENERATED by distutils, do NOT edit') - self.execute(file_util.write_file, (self.manifest, content), - "writing manifest file '%s'" % self.manifest) + self.execute( + file_util.write_file, + (self.manifest, content), + "writing manifest file '%s'" % self.manifest, + ) def _manifest_is_not_generated(self): # check for special comment used in 3.1.3 and higher @@ -437,10 +473,10 @@ class sdist(Command): # out-of-date, because by default we blow away 'base_dir' when # we're done making the distribution archives.) - if hasattr(os, 'link'): # can make hard links on this system + if hasattr(os, 'link'): # can make hard links on this system link = 'hard' msg = "making hard links in %s..." % base_dir - else: # nope, have to copy + else: # nope, have to copy link = None msg = "copying files to %s..." % base_dir @@ -471,14 +507,15 @@ class sdist(Command): base_name = os.path.join(self.dist_dir, base_dir) self.make_release_tree(base_dir, self.filelist.files) - archive_files = [] # remember names of files we create + archive_files = [] # remember names of files we create # tar archive must be created last to avoid overwrite and remove if 'tar' in self.formats: self.formats.append(self.formats.pop(self.formats.index('tar'))) for fmt in self.formats: - file = self.make_archive(base_name, fmt, base_dir=base_dir, - owner=self.owner, group=self.group) + file = self.make_archive( + base_name, fmt, base_dir=base_dir, owner=self.owner, group=self.group + ) archive_files.append(file) self.distribution.dist_files.append(('sdist', '', file)) diff --git a/setuptools/_distutils/command/upload.py b/setuptools/_distutils/command/upload.py index 95e9fda..782e3de 100644 --- a/setuptools/_distutils/command/upload.py +++ b/setuptools/_distutils/command/upload.py @@ -31,10 +31,9 @@ class upload(PyPIRCCommand): description = "upload binary package to PyPI" user_options = PyPIRCCommand.user_options + [ - ('sign', 's', - 'sign files to upload using gpg'), + ('sign', 's', 'sign files to upload using gpg'), ('identity=', 'i', 'GPG identity used to sign files'), - ] + ] boolean_options = PyPIRCCommand.boolean_options + ['sign'] @@ -49,9 +48,7 @@ class upload(PyPIRCCommand): def finalize_options(self): PyPIRCCommand.finalize_options(self) if self.identity and not self.sign: - raise DistutilsOptionError( - "Must use --sign for --identity to have meaning" - ) + raise DistutilsOptionError("Must use --sign for --identity to have meaning") config = self._read_pypirc() if config != {}: self.username = config['username'] @@ -66,16 +63,17 @@ class upload(PyPIRCCommand): def run(self): if not self.distribution.dist_files: - msg = ("Must create and upload files in one command " - "(e.g. setup.py sdist upload)") + msg = ( + "Must create and upload files in one command " + "(e.g. 
setup.py sdist upload)" + ) raise DistutilsOptionError(msg) for command, pyversion, filename in self.distribution.dist_files: self.upload_file(command, pyversion, filename) def upload_file(self, command, pyversion, filename): # Makes sure the repository URL is compliant - schema, netloc, url, params, query, fragments = \ - urlparse(self.repository) + schema, netloc, url, params, query, fragments = urlparse(self.repository) if params or query or fragments: raise AssertionError("Incompatible url %s" % self.repository) @@ -87,12 +85,11 @@ class upload(PyPIRCCommand): gpg_args = ["gpg", "--detach-sign", "-a", filename] if self.identity: gpg_args[2:2] = ["--local-user", self.identity] - spawn(gpg_args, - dry_run=self.dry_run) + spawn(gpg_args, dry_run=self.dry_run) # Fill in the data - send all the meta-data in case we need to # register a new release - f = open(filename,'rb') + f = open(filename, 'rb') try: content = f.read() finally: @@ -103,16 +100,13 @@ class upload(PyPIRCCommand): # action ':action': 'file_upload', 'protocol_version': '1', - # identify release 'name': meta.get_name(), 'version': meta.get_version(), - # file content - 'content': (os.path.basename(filename),content), + 'content': (os.path.basename(filename), content), 'filetype': command, 'pyversion': pyversion, - # additional meta-data 'metadata_version': '1.0', 'summary': meta.get_description(), @@ -129,7 +123,7 @@ class upload(PyPIRCCommand): 'provides': meta.get_provides(), 'requires': meta.get_requires(), 'obsoletes': meta.get_obsoletes(), - } + } data['comment'] = '' @@ -145,8 +139,7 @@ class upload(PyPIRCCommand): if self.sign: with open(filename + ".asc", "rb") as f: - data['gpg_signature'] = (os.path.basename(filename) + ".asc", - f.read()) + data['gpg_signature'] = (os.path.basename(filename) + ".asc", f.read()) # set up the authentication user_pass = (self.username + ":" + self.password).encode('ascii') @@ -187,8 +180,7 @@ class upload(PyPIRCCommand): 'Authorization': auth, } - request = Request(self.repository, data=body, - headers=headers) + request = Request(self.repository, data=body, headers=headers) # send the data try: result = urlopen(request) @@ -202,8 +194,7 @@ class upload(PyPIRCCommand): raise if status == 200: - self.announce('Server response (%s): %s' % (status, reason), - log.INFO) + self.announce('Server response (%s): %s' % (status, reason), log.INFO) if self.show_response: text = self._read_pypi_response(result) msg = '\n'.join(('-' * 75, text, '-' * 75)) diff --git a/setuptools/_distutils/config.py b/setuptools/_distutils/config.py index 2171abd..34a1c3b 100644 --- a/setuptools/_distutils/config.py +++ b/setuptools/_distutils/config.py @@ -18,20 +18,19 @@ username:%s password:%s """ + class PyPIRCCommand(Command): - """Base command that knows how to handle the .pypirc file - """ + """Base command that knows how to handle the .pypirc file""" + DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' DEFAULT_REALM = 'pypi' repository = None realm = None user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % \ - DEFAULT_REPOSITORY), - ('show-response', None, - 'display full response text from server')] + ('repository=', 'r', "url of repository [default: %s]" % DEFAULT_REPOSITORY), + ('show-response', None, 'display full response text from server'), + ] boolean_options = ['show-response'] @@ -58,9 +57,11 @@ class PyPIRCCommand(Command): if 'distutils' in sections: # let's get the list of servers index_servers = config.get('distutils', 'index-servers') - _servers = 
[server.strip() for server in - index_servers.split('\n') - if server.strip() != ''] + _servers = [ + server.strip() + for server in index_servers.split('\n') + if server.strip() != '' + ] if _servers == []: # nothing set, let's try to get the default pypi if 'pypi' in sections: @@ -74,10 +75,11 @@ class PyPIRCCommand(Command): current['username'] = config.get(server, 'username') # optional params - for key, default in (('repository', - self.DEFAULT_REPOSITORY), - ('realm', self.DEFAULT_REALM), - ('password', None)): + for key, default in ( + ('repository', self.DEFAULT_REPOSITORY), + ('realm', self.DEFAULT_REALM), + ('password', None), + ): if config.has_option(server, key): current[key] = config.get(server, key) else: @@ -86,13 +88,17 @@ class PyPIRCCommand(Command): # work around people having "repository" for the "pypi" # section of their config set to the HTTP (rather than # HTTPS) URL - if (server == 'pypi' and - repository in (self.DEFAULT_REPOSITORY, 'pypi')): + if server == 'pypi' and repository in ( + self.DEFAULT_REPOSITORY, + 'pypi', + ): current['repository'] = self.DEFAULT_REPOSITORY return current - if (current['server'] == repository or - current['repository'] == repository): + if ( + current['server'] == repository + or current['repository'] == repository + ): return current elif 'server-login' in sections: # old format @@ -101,17 +107,20 @@ class PyPIRCCommand(Command): repository = config.get(server, 'repository') else: repository = self.DEFAULT_REPOSITORY - return {'username': config.get(server, 'username'), - 'password': config.get(server, 'password'), - 'repository': repository, - 'server': server, - 'realm': self.DEFAULT_REALM} + return { + 'username': config.get(server, 'username'), + 'password': config.get(server, 'password'), + 'repository': repository, + 'server': server, + 'realm': self.DEFAULT_REALM, + } return {} def _read_pypi_response(self, response): """Read and decode a PyPI HTTP response.""" import cgi + content_type = response.getheader('content-type', 'text/plain') encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii') return response.read().decode(encoding) diff --git a/setuptools/_distutils/core.py b/setuptools/_distutils/core.py index f43888e..305ecf6 100644 --- a/setuptools/_distutils/core.py +++ b/setuptools/_distutils/core.py @@ -30,7 +30,8 @@ usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...] 
or: %(script)s cmd --help """ -def gen_usage (script_name): + +def gen_usage(script_name): script = os.path.basename(script_name) return USAGE % vars() @@ -40,22 +41,51 @@ _setup_stop_after = None _setup_distribution = None # Legal keyword arguments for the setup() function -setup_keywords = ('distclass', 'script_name', 'script_args', 'options', - 'name', 'version', 'author', 'author_email', - 'maintainer', 'maintainer_email', 'url', 'license', - 'description', 'long_description', 'keywords', - 'platforms', 'classifiers', 'download_url', - 'requires', 'provides', 'obsoletes', - ) +setup_keywords = ( + 'distclass', + 'script_name', + 'script_args', + 'options', + 'name', + 'version', + 'author', + 'author_email', + 'maintainer', + 'maintainer_email', + 'url', + 'license', + 'description', + 'long_description', + 'keywords', + 'platforms', + 'classifiers', + 'download_url', + 'requires', + 'provides', + 'obsoletes', +) # Legal keyword arguments for the Extension constructor -extension_keywords = ('name', 'sources', 'include_dirs', - 'define_macros', 'undef_macros', - 'library_dirs', 'libraries', 'runtime_library_dirs', - 'extra_objects', 'extra_compile_args', 'extra_link_args', - 'swig_opts', 'export_symbols', 'depends', 'language') - -def setup (**attrs): +extension_keywords = ( + 'name', + 'sources', + 'include_dirs', + 'define_macros', + 'undef_macros', + 'library_dirs', + 'libraries', + 'runtime_library_dirs', + 'extra_objects', + 'extra_compile_args', + 'extra_link_args', + 'swig_opts', + 'export_symbols', + 'depends', + 'language', +) + + +def setup(**attrs): """The gateway to the Distutils: do everything your setup script needs to do, in a highly flexible and user-driven way. Briefly: create a Distribution instance; find and parse config files; parse the command @@ -100,7 +130,7 @@ def setup (**attrs): if 'script_name' not in attrs: attrs['script_name'] = os.path.basename(sys.argv[0]) - if 'script_args' not in attrs: + if 'script_args' not in attrs: attrs['script_args'] = sys.argv[1:] # Create the Distribution instance, using the remaining arguments @@ -111,8 +141,7 @@ def setup (**attrs): if 'name' not in attrs: raise SystemExit("error in setup command: %s" % msg) else: - raise SystemExit("error in %s setup command: %s" % \ - (attrs['name'], msg)) + raise SystemExit("error in %s setup command: %s" % (attrs['name'], msg)) if _setup_stop_after == "init": return dist @@ -149,10 +178,11 @@ def setup (**attrs): return dist + # setup () -def run_commands (dist): +def run_commands(dist): """Given a Distribution object run all the commands, raising ``SystemExit`` errors in the case of failure. @@ -170,8 +200,7 @@ def run_commands (dist): else: raise SystemExit("error: %s" % (exc,)) - except (DistutilsError, - CCompilerError) as msg: + except (DistutilsError, CCompilerError) as msg: if DEBUG: raise else: @@ -180,7 +209,7 @@ def run_commands (dist): return dist -def run_setup (script_name, script_args=None, stop_after="run"): +def run_setup(script_name, script_args=None, stop_after="run"): """Run a setup script in a somewhat controlled environment, and return the Distribution instance that drives things. 
This is useful if you need to find out the distribution meta-data (passed as @@ -237,13 +266,18 @@ def run_setup (script_name, script_args=None, stop_after="run"): pass if _setup_distribution is None: - raise RuntimeError(("'distutils.core.setup()' was never called -- " - "perhaps '%s' is not a Distutils setup script?") % \ - script_name) + raise RuntimeError( + ( + "'distutils.core.setup()' was never called -- " + "perhaps '%s' is not a Distutils setup script?" + ) + % script_name + ) # I wonder if the setup script's namespace -- g and l -- would be of # any interest to callers? - #print "_setup_distribution:", _setup_distribution + # print "_setup_distribution:", _setup_distribution return _setup_distribution + # run_setup () diff --git a/setuptools/_distutils/cygwinccompiler.py b/setuptools/_distutils/cygwinccompiler.py index c5c86d8..931b366 100644 --- a/setuptools/_distutils/cygwinccompiler.py +++ b/setuptools/_distutils/cygwinccompiler.py @@ -56,17 +56,22 @@ from subprocess import check_output from distutils.unixccompiler import UnixCCompiler from distutils.file_util import write_file -from distutils.errors import (DistutilsExecError, CCompilerError, - CompileError, UnknownFileError) +from distutils.errors import ( + DistutilsExecError, + CCompilerError, + CompileError, + UnknownFileError, +) from distutils.version import LooseVersion, suppress_known_deprecation + def get_msvcr(): """Include the appropriate MSVC runtime library if Python was built with MSVC 7.0 or later. """ msc_pos = sys.version.find('MSC v.') if msc_pos != -1: - msc_ver = sys.version[msc_pos+6:msc_pos+10] + msc_ver = sys.version[msc_pos + 6 : msc_pos + 10] if msc_ver == '1300': # MSVC 7.0 return ['msvcr70'] @@ -90,20 +95,22 @@ def get_msvcr(): return ['msvcr120'] elif 1900 <= int(msc_ver) < 2000: # VS2015 / MSVC 14.0 - return ['ucrt', 'vcruntime140'] + return ['ucrt', 'vcruntime140'] else: raise ValueError("Unknown MS Compiler version %s " % msc_ver) class CygwinCCompiler(UnixCCompiler): - """ Handles the Cygwin port of the GNU C compiler to Windows. - """ + """Handles the Cygwin port of the GNU C compiler to Windows.""" + compiler_type = 'cygwin' obj_extension = ".o" static_lib_extension = ".a" - shared_lib_extension = ".dll" + shared_lib_extension = ".dll.a" + dylib_lib_extension = ".dll" static_lib_format = "lib%s%s" - shared_lib_format = "%s%s" + shared_lib_format = "lib%s%s" + dylib_lib_format = "cyg%s%s" exe_extension = ".exe" def __init__(self, verbose=0, dry_run=0, force=0): @@ -111,14 +118,13 @@ class CygwinCCompiler(UnixCCompiler): super().__init__(verbose, dry_run, force) status, details = check_config_h() - self.debug_print("Python's GCC status: %s (details: %s)" % - (status, details)) + self.debug_print("Python's GCC status: %s (details: %s)" % (status, details)) if status is not CONFIG_H_OK: self.warn( "Python's pyconfig.h doesn't seem to support your compiler. " "Reason: %s. " - "Compiling may fail because of undefined preprocessor macros." - % details) + "Compiling may fail because of undefined preprocessor macros." 
% details + ) self.cc = os.environ.get('CC', 'gcc') self.cxx = os.environ.get('CXX', 'g++') @@ -126,12 +132,13 @@ class CygwinCCompiler(UnixCCompiler): self.linker_dll = self.cc shared_option = "-shared" - self.set_executables(compiler='%s -mcygwin -O -Wall' % self.cc, - compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc, - compiler_cxx='%s -mcygwin -O -Wall' % self.cxx, - linker_exe='%s -mcygwin' % self.cc, - linker_so=('%s -mcygwin %s' % - (self.linker_dll, shared_option))) + self.set_executables( + compiler='%s -mcygwin -O -Wall' % self.cc, + compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc, + compiler_cxx='%s -mcygwin -O -Wall' % self.cxx, + linker_exe='%s -mcygwin' % self.cc, + linker_so=('%s -mcygwin %s' % (self.linker_dll, shared_option)), + ) # Include the appropriate MSVC runtime library if Python was built # with MSVC 7.0 or later. @@ -160,17 +167,30 @@ class CygwinCCompiler(UnixCCompiler): self.spawn(["windres", "-i", src, "-o", obj]) except DistutilsExecError as msg: raise CompileError(msg) - else: # for other files use the C-compiler + else: # for other files use the C-compiler try: - self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) + self.spawn( + self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs + ) except DistutilsExecError as msg: raise CompileError(msg) - def link(self, target_desc, objects, output_filename, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): + def link( + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): """Link the objects.""" # use separate copies, so we can modify the lists extra_preargs = copy.copy(extra_preargs or []) @@ -182,8 +202,9 @@ class CygwinCCompiler(UnixCCompiler): # handle export symbols by creating a def-file # with executables this only works with gcc/ld as linker - if ((export_symbols is not None) and - (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): + if (export_symbols is not None) and ( + target_desc != self.EXECUTABLE or self.linker_dll == "gcc" + ): # (The linker doesn't do anything if output is up-to-date. 
# So it would probably better to check if we really need this, # but for this we had to insert some unchanged parts of @@ -195,29 +216,27 @@ class CygwinCCompiler(UnixCCompiler): temp_dir = os.path.dirname(objects[0]) # name of dll to give the helper files the same base name (dll_name, dll_extension) = os.path.splitext( - os.path.basename(output_filename)) + os.path.basename(output_filename) + ) # generate the filenames for these files def_file = os.path.join(temp_dir, dll_name + ".def") lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a") # Generate .def file - contents = [ - "LIBRARY %s" % os.path.basename(output_filename), - "EXPORTS"] + contents = ["LIBRARY %s" % os.path.basename(output_filename), "EXPORTS"] for sym in export_symbols: contents.append(sym) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) + self.execute(write_file, (def_file, contents), "writing %s" % def_file) # next add options for def-file and to creating import libraries # doesn't work: bfd_close build\...\libfoo.a: Invalid operation - #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file]) + # extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file]) # for gcc/ld the def-file is specified as any object files objects.append(def_file) - #end: if ((export_symbols is not None) and + # end: if ((export_symbols is not None) and # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): # who wants symbols and a many times larger output file @@ -229,12 +248,22 @@ class CygwinCCompiler(UnixCCompiler): if not debug: extra_preargs.append("-s") - UnixCCompiler.link(self, target_desc, objects, output_filename, - output_dir, libraries, library_dirs, - runtime_library_dirs, - None, # export_symbols, we do this in our def-file - debug, extra_preargs, extra_postargs, build_temp, - target_lang) + UnixCCompiler.link( + self, + target_desc, + objects, + output_filename, + output_dir, + libraries, + library_dirs, + runtime_library_dirs, + None, # export_symbols, we do this in our def-file + debug, + extra_preargs, + extra_postargs, + build_temp, + target_lang, + ) # -- Miscellaneous methods ----------------------------------------- @@ -246,54 +275,57 @@ class CygwinCCompiler(UnixCCompiler): for src_name in source_filenames: # use normcase to make sure '.rc' is really '.rc' and not '.RC' base, ext = os.path.splitext(os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % \ - (ext, src_name)) + if ext not in (self.src_extensions + ['.rc', '.res']): + raise UnknownFileError( + "unknown file type '%s' (from '%s')" % (ext, src_name) + ) if strip_dir: - base = os.path.basename (base) + base = os.path.basename(base) if ext in ('.res', '.rc'): # these need to be compiled to object files - obj_names.append (os.path.join(output_dir, - base + ext + self.obj_extension)) + obj_names.append( + os.path.join(output_dir, base + ext + self.obj_extension) + ) else: - obj_names.append (os.path.join(output_dir, - base + self.obj_extension)) + obj_names.append(os.path.join(output_dir, base + self.obj_extension)) return obj_names + # the same as cygwin plus some additional parameters class Mingw32CCompiler(CygwinCCompiler): - """ Handles the Mingw32 port of the GNU C compiler to Windows. 
- """ + """Handles the Mingw32 port of the GNU C compiler to Windows.""" + compiler_type = 'mingw32' def __init__(self, verbose=0, dry_run=0, force=0): - super().__init__ (verbose, dry_run, force) + super().__init__(verbose, dry_run, force) shared_option = "-shared" if is_cygwincc(self.cc): - raise CCompilerError( - 'Cygwin gcc cannot be used with --compiler=mingw32') - - self.set_executables(compiler='%s -O -Wall' % self.cc, - compiler_so='%s -mdll -O -Wall' % self.cc, - compiler_cxx='%s -O -Wall' % self.cxx, - linker_exe='%s' % self.cc, - linker_so='%s %s' - % (self.linker_dll, shared_option)) + raise CCompilerError('Cygwin gcc cannot be used with --compiler=mingw32') + + self.set_executables( + compiler='%s -O -Wall' % self.cc, + compiler_so='%s -mdll -O -Wall' % self.cc, + compiler_cxx='%s -O -Wall' % self.cxx, + linker_exe='%s' % self.cc, + linker_so='%s %s' % (self.linker_dll, shared_option), + ) # Maybe we should also append -mthreads, but then the finished # dlls need another dll (mingwm10.dll see Mingw32 docs) # (-mthreads: Support thread-safe exception handling on `Mingw32') # no additional libraries needed - self.dll_libraries=[] + self.dll_libraries = [] # Include the appropriate MSVC runtime library if Python was built # with MSVC 7.0 or later. self.dll_libraries = get_msvcr() + # Because these compilers aren't configured in Python's pyconfig.h file by # default, we should at least warn the user if he is using an unmodified # version. @@ -302,6 +334,7 @@ CONFIG_H_OK = "ok" CONFIG_H_NOTOK = "not ok" CONFIG_H_UNCERTAIN = "uncertain" + def check_config_h(): """Check if the current Python installation appears amenable to building extensions with GCC. @@ -346,8 +379,8 @@ def check_config_h(): finally: config_h.close() except OSError as exc: - return (CONFIG_H_UNCERTAIN, - "couldn't read '%s': %s" % (fn, exc.strerror)) + return (CONFIG_H_UNCERTAIN, "couldn't read '%s': %s" % (fn, exc.strerror)) + def is_cygwincc(cc): '''Try to determine if the compiler that would be used is from cygwin.''' diff --git a/setuptools/_distutils/dep_util.py b/setuptools/_distutils/dep_util.py index d74f5e4..d94e111 100644 --- a/setuptools/_distutils/dep_util.py +++ b/setuptools/_distutils/dep_util.py @@ -8,28 +8,29 @@ import os from distutils.errors import DistutilsFileError -def newer (source, target): +def newer(source, target): """Return true if 'source' exists and is more recently modified than 'target', or if 'source' exists and 'target' doesn't. Return false if both exist and 'target' is the same age or younger than 'source'. Raise DistutilsFileError if 'source' does not exist. """ if not os.path.exists(source): - raise DistutilsFileError("file '%s' does not exist" % - os.path.abspath(source)) + raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source)) if not os.path.exists(target): return 1 from stat import ST_MTIME + mtime1 = os.stat(source)[ST_MTIME] mtime2 = os.stat(target)[ST_MTIME] return mtime1 > mtime2 + # newer () -def newer_pairwise (sources, targets): +def newer_pairwise(sources, targets): """Walk two filename lists in parallel, testing if each source is newer than its corresponding target. 
Return a pair of lists (sources, targets) where source is newer than target, according to the semantics @@ -48,10 +49,11 @@ def newer_pairwise (sources, targets): return (n_sources, n_targets) + # newer_pairwise () -def newer_group (sources, target, missing='error'): +def newer_group(sources, target, missing='error'): """Return true if 'target' is out-of-date with respect to any file listed in 'sources'. In other words, if 'target' exists and is newer than every file in 'sources', return false; otherwise return true. @@ -73,15 +75,16 @@ def newer_group (sources, target, missing='error'): # we can immediately return true. If we fall through to the end # of the loop, then 'target' is up-to-date and we return false. from stat import ST_MTIME + target_mtime = os.stat(target)[ST_MTIME] for source in sources: if not os.path.exists(source): - if missing == 'error': # blow up when we stat() the file + if missing == 'error': # blow up when we stat() the file pass - elif missing == 'ignore': # missing source dropped from - continue # target's dependency list - elif missing == 'newer': # missing source means target is - return 1 # out-of-date + elif missing == 'ignore': # missing source dropped from + continue # target's dependency list + elif missing == 'newer': # missing source means target is + return 1 # out-of-date source_mtime = os.stat(source)[ST_MTIME] if source_mtime > target_mtime: @@ -89,4 +92,5 @@ def newer_group (sources, target, missing='error'): else: return 0 + # newer_group () diff --git a/setuptools/_distutils/dir_util.py b/setuptools/_distutils/dir_util.py index d5cd8e3..2c19b98 100644 --- a/setuptools/_distutils/dir_util.py +++ b/setuptools/_distutils/dir_util.py @@ -30,7 +30,8 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # Detect a common bug -- name is None if not isinstance(name, str): raise DistutilsInternalError( - "mkpath: 'name' must be a string (got %r)" % (name,)) + "mkpath: 'name' must be a string (got %r)" % (name,) + ) # XXX what's the better way to handle verbosity? print as we create # each directory in the path (the current behaviour), or only announce @@ -45,17 +46,17 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): return created_dirs (head, tail) = os.path.split(name) - tails = [tail] # stack of lone dirs to create + tails = [tail] # stack of lone dirs to create while head and tail and not os.path.isdir(head): (head, tail) = os.path.split(head) - tails.insert(0, tail) # push next higher dir onto stack + tails.insert(0, tail) # push next higher dir onto stack # now 'head' contains the deepest directory that already exists # (that is, the child of 'head' in 'name' is the highest directory # that does *not* exist) for d in tails: - #print "head = %s, d = %s: " % (head, d), + # print "head = %s, d = %s: " % (head, d), head = os.path.join(head, d) abs_head = os.path.abspath(head) @@ -71,12 +72,14 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): except OSError as exc: if not (exc.errno == errno.EEXIST and os.path.isdir(head)): raise DistutilsFileError( - "could not create '%s': %s" % (head, exc.args[-1])) + "could not create '%s': %s" % (head, exc.args[-1]) + ) created_dirs.append(head) _path_created[abs_head] = 1 return created_dirs + def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0): """Create all the empty directories under 'base_dir' needed to put 'files' there. 
@@ -96,8 +99,17 @@ def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0): for dir in sorted(need_dir): mkpath(dir, mode, verbose=verbose, dry_run=dry_run) -def copy_tree(src, dst, preserve_mode=1, preserve_times=1, - preserve_symlinks=0, update=0, verbose=1, dry_run=0): + +def copy_tree( + src, + dst, + preserve_mode=1, + preserve_times=1, + preserve_symlinks=0, + update=0, + verbose=1, + dry_run=0, +): """Copy an entire directory tree 'src' to a new location 'dst'. Both 'src' and 'dst' must be directory names. If 'src' is not a @@ -120,8 +132,7 @@ def copy_tree(src, dst, preserve_mode=1, preserve_times=1, from distutils.file_util import copy_file if not dry_run and not os.path.isdir(src): - raise DistutilsFileError( - "cannot copy tree '%s': not a directory" % src) + raise DistutilsFileError("cannot copy tree '%s': not a directory" % src) try: names = os.listdir(src) except OSError as e: @@ -129,7 +140,8 @@ def copy_tree(src, dst, preserve_mode=1, preserve_times=1, names = [] else: raise DistutilsFileError( - "error listing files in '%s': %s" % (src, e.strerror)) + "error listing files in '%s': %s" % (src, e.strerror) + ) if not dry_run: mkpath(dst, verbose=verbose) @@ -154,27 +166,43 @@ def copy_tree(src, dst, preserve_mode=1, preserve_times=1, elif os.path.isdir(src_name): outputs.extend( - copy_tree(src_name, dst_name, preserve_mode, - preserve_times, preserve_symlinks, update, - verbose=verbose, dry_run=dry_run)) + copy_tree( + src_name, + dst_name, + preserve_mode, + preserve_times, + preserve_symlinks, + update, + verbose=verbose, + dry_run=dry_run, + ) + ) else: - copy_file(src_name, dst_name, preserve_mode, - preserve_times, update, verbose=verbose, - dry_run=dry_run) + copy_file( + src_name, + dst_name, + preserve_mode, + preserve_times, + update, + verbose=verbose, + dry_run=dry_run, + ) outputs.append(dst_name) return outputs + def _build_cmdtuple(path, cmdtuples): """Helper for remove_tree().""" for f in os.listdir(path): - real_f = os.path.join(path,f) + real_f = os.path.join(path, f) if os.path.isdir(real_f) and not os.path.islink(real_f): _build_cmdtuple(real_f, cmdtuples) else: cmdtuples.append((os.remove, real_f)) cmdtuples.append((os.rmdir, path)) + def remove_tree(directory, verbose=1, dry_run=0): """Recursively remove an entire directory tree. @@ -199,6 +227,7 @@ def remove_tree(directory, verbose=1, dry_run=0): except OSError as exc: log.warn("error removing %s: %s", directory, exc) + def ensure_relative(path): """Take the full path 'path', and make it a relative path. 
diff --git a/setuptools/_distutils/dist.py b/setuptools/_distutils/dist.py index 4502497..82e3684 100644 --- a/setuptools/_distutils/dist.py +++ b/setuptools/_distutils/dist.py @@ -69,8 +69,7 @@ class Distribution: ('quiet', 'q', "run quietly (turns verbosity off)"), ('dry-run', 'n', "don't actually do anything"), ('help', 'h', "show detailed help message"), - ('no-user-cfg', None, - 'ignore pydistutils.cfg in your home directory'), + ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'), ] # 'common_usage' is a short (2-3 line) string describing the common @@ -84,49 +83,32 @@ Common commands: (see '--help-commands' for more) # options that are not propagated to the commands display_options = [ - ('help-commands', None, - "list all available commands"), - ('name', None, - "print package name"), - ('version', 'V', - "print package version"), - ('fullname', None, - "print -"), - ('author', None, - "print the author's name"), - ('author-email', None, - "print the author's email address"), - ('maintainer', None, - "print the maintainer's name"), - ('maintainer-email', None, - "print the maintainer's email address"), - ('contact', None, - "print the maintainer's name if known, else the author's"), - ('contact-email', None, - "print the maintainer's email address if known, else the author's"), - ('url', None, - "print the URL for this package"), - ('license', None, - "print the license of the package"), - ('licence', None, - "alias for --license"), - ('description', None, - "print the package description"), - ('long-description', None, - "print the long package description"), - ('platforms', None, - "print the list of platforms"), - ('classifiers', None, - "print the list of classifiers"), - ('keywords', None, - "print the list of keywords"), - ('provides', None, - "print the list of packages/modules provided"), - ('requires', None, - "print the list of packages/modules required"), - ('obsoletes', None, - "print the list of packages/modules made obsolete") - ] + ('help-commands', None, "list all available commands"), + ('name', None, "print package name"), + ('version', 'V', "print package version"), + ('fullname', None, "print -"), + ('author', None, "print the author's name"), + ('author-email', None, "print the author's email address"), + ('maintainer', None, "print the maintainer's name"), + ('maintainer-email', None, "print the maintainer's email address"), + ('contact', None, "print the maintainer's name if known, else the author's"), + ( + 'contact-email', + None, + "print the maintainer's email address if known, else the author's", + ), + ('url', None, "print the URL for this package"), + ('license', None, "print the license of the package"), + ('licence', None, "alias for --license"), + ('description', None, "print the package description"), + ('long-description', None, "print the long package description"), + ('platforms', None, "print the list of platforms"), + ('classifiers', None, "print the list of classifiers"), + ('keywords', None, "print the list of keywords"), + ('provides', None, "print the list of packages/modules provided"), + ('requires', None, "print the list of packages/modules required"), + ('obsoletes', None, "print the list of packages/modules made obsolete"), + ] display_option_names = [translate_longopt(x[0]) for x in display_options] # negative options are options that exclude other options @@ -306,7 +288,7 @@ Common commands: (see '--help-commands' for more) def dump_option_dicts(self, header=None, commands=None, indent=""): from pprint import 
pformat - if commands is None: # dump all command option dicts + if commands is None: # dump all command option dicts commands = sorted(self.command_options.keys()) if header is not None: @@ -320,11 +302,9 @@ Common commands: (see '--help-commands' for more) for cmd_name in commands: opt_dict = self.command_options.get(cmd_name) if opt_dict is None: - self.announce(indent + - "no option dict for '%s' command" % cmd_name) + self.announce(indent + "no option dict for '%s' command" % cmd_name) else: - self.announce(indent + - "option dict for '%s' command:" % cmd_name) + self.announce(indent + "option dict for '%s' command:" % cmd_name) out = pformat(opt_dict) for line in out.split('\n'): self.announce(indent + " " + line) @@ -385,10 +365,20 @@ Common commands: (see '--help-commands' for more) # Ignore install directory options if we have a venv if sys.prefix != sys.base_prefix: ignore_options = [ - 'install-base', 'install-platbase', 'install-lib', - 'install-platlib', 'install-purelib', 'install-headers', - 'install-scripts', 'install-data', 'prefix', 'exec-prefix', - 'home', 'user', 'root'] + 'install-base', + 'install-platbase', + 'install-lib', + 'install-platlib', + 'install-purelib', + 'install-headers', + 'install-scripts', + 'install-data', + 'prefix', + 'exec-prefix', + 'home', + 'user', + 'root', + ] else: ignore_options = [] @@ -411,7 +401,7 @@ Common commands: (see '--help-commands' for more) for opt in options: if opt != '__name__' and opt not in ignore_options: - val = parser.get(section,opt) + val = parser.get(section, opt) opt = opt.replace('-', '_') opt_dict[opt] = (filename, val) @@ -428,7 +418,7 @@ Common commands: (see '--help-commands' for more) try: if alias: setattr(self, alias, not strtobool(val)) - elif opt in ('verbose', 'dry_run'): # ugh! + elif opt in ('verbose', 'dry_run'): # ugh! setattr(self, opt, strtobool(val)) else: setattr(self, opt, val) @@ -482,7 +472,7 @@ Common commands: (see '--help-commands' for more) return while args: args = self._parse_command_opts(parser, args) - if args is None: # user asked for help (and got it) + if args is None: # user asked for help (and got it) return # Handle the cases of --help as a "global" option, ie. @@ -492,9 +482,9 @@ Common commands: (see '--help-commands' for more) # latter, we omit the display-only options and show help for # each command listed on the command line. if self.help: - self._show_help(parser, - display_options=len(self.commands) == 0, - commands=self.commands) + self._show_help( + parser, display_options=len(self.commands) == 0, commands=self.commands + ) return # Oops, no commands found -- an end-user error @@ -511,9 +501,12 @@ Common commands: (see '--help-commands' for more) level as well as options recognized for commands. """ return self.global_options + [ - ("command-packages=", None, - "list of packages that provide distutils commands"), - ] + ( + "command-packages=", + None, + "list of packages that provide distutils commands", + ), + ] def _parse_command_opts(self, parser, args): """Parse the command-line options for a single command. @@ -545,14 +538,19 @@ Common commands: (see '--help-commands' for more) # to be sure that the basic "command" interface is implemented. if not issubclass(cmd_class, Command): raise DistutilsClassError( - "command class %s must subclass Command" % cmd_class) + "command class %s must subclass Command" % cmd_class + ) # Also make sure that the command object provides a list of its # known options. 
- if not (hasattr(cmd_class, 'user_options') and - isinstance(cmd_class.user_options, list)): - msg = ("command class %s must provide " - "'user_options' attribute (a list of tuples)") + if not ( + hasattr(cmd_class, 'user_options') + and isinstance(cmd_class.user_options, list) + ): + msg = ( + "command class %s must provide " + "'user_options' attribute (a list of tuples)" + ) raise DistutilsClassError(msg % cmd_class) # If the command class has a list of negative alias options, @@ -564,36 +562,39 @@ Common commands: (see '--help-commands' for more) # Check for help_options in command class. They have a different # format (tuple of four) so we need to preprocess them here. - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): + if hasattr(cmd_class, 'help_options') and isinstance( + cmd_class.help_options, list + ): help_options = fix_help_options(cmd_class.help_options) else: help_options = [] # All commands support the global options too, just by adding # in 'global_options'. - parser.set_option_table(self.global_options + - cmd_class.user_options + - help_options) + parser.set_option_table( + self.global_options + cmd_class.user_options + help_options + ) parser.set_negative_aliases(negative_opt) (args, opts) = parser.getopt(args[1:]) if hasattr(opts, 'help') and opts.help: self._show_help(parser, display_options=0, commands=[cmd_class]) return - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): - help_option_found=0 + if hasattr(cmd_class, 'help_options') and isinstance( + cmd_class.help_options, list + ): + help_option_found = 0 for (help_option, short, desc, func) in cmd_class.help_options: if hasattr(opts, parser.get_attr_name(help_option)): - help_option_found=1 + help_option_found = 1 if callable(func): func() else: raise DistutilsClassError( "invalid help function %r for help option '%s': " "must be a callable object (function, etc.)" - % (func, help_option)) + % (func, help_option) + ) if help_option_found: return @@ -619,8 +620,7 @@ Common commands: (see '--help-commands' for more) value = [elm.strip() for elm in value.split(',')] setattr(self.metadata, attr, value) - def _show_help(self, parser, global_options=1, display_options=1, - commands=[]): + def _show_help(self, parser, global_options=1, display_options=1, commands=[]): """Show help for the setup script command-line in the form of several lists of command-line options. 
'parser' should be a FancyGetopt instance; do not expect it to be returned in the @@ -649,8 +649,9 @@ Common commands: (see '--help-commands' for more) if display_options: parser.set_option_table(self.display_options) parser.print_help( - "Information display options (just display " + - "information, ignore any commands)") + "Information display options (just display " + + "information, ignore any commands)" + ) print('') for command in self.commands: @@ -658,10 +659,10 @@ Common commands: (see '--help-commands' for more) klass = command else: klass = self.get_command_class(command) - if (hasattr(klass, 'help_options') and - isinstance(klass.help_options, list)): - parser.set_option_table(klass.user_options + - fix_help_options(klass.help_options)) + if hasattr(klass, 'help_options') and isinstance(klass.help_options, list): + parser.set_option_table( + klass.user_options + fix_help_options(klass.help_options) + ) else: parser.set_option_table(klass.user_options) parser.print_help("Options for '%s' command:" % klass.__name__) @@ -697,11 +698,10 @@ Common commands: (see '--help-commands' for more) for (opt, val) in option_order: if val and is_display_option.get(opt): opt = translate_longopt(opt) - value = getattr(self.metadata, "get_"+opt)() + value = getattr(self.metadata, "get_" + opt)() if opt in ['keywords', 'platforms']: print(','.join(value)) - elif opt in ('classifiers', 'provides', 'requires', - 'obsoletes'): + elif opt in ('classifiers', 'provides', 'requires', 'obsoletes'): print('\n'.join(value)) else: print(value) @@ -735,6 +735,7 @@ Common commands: (see '--help-commands' for more) 'description'. """ import distutils.command + std_commands = distutils.command.__all__ is_std = {} for cmd in std_commands: @@ -746,18 +747,14 @@ Common commands: (see '--help-commands' for more) extra_commands.append(cmd) max_length = 0 - for cmd in (std_commands + extra_commands): + for cmd in std_commands + extra_commands: if len(cmd) > max_length: max_length = len(cmd) - self.print_command_list(std_commands, - "Standard commands", - max_length) + self.print_command_list(std_commands, "Standard commands", max_length) if extra_commands: print() - self.print_command_list(extra_commands, - "Extra commands", - max_length) + self.print_command_list(extra_commands, "Extra commands", max_length) def get_command_list(self): """Get a list of (command, description) tuples. 
@@ -769,6 +766,7 @@ Common commands: (see '--help-commands' for more) # Currently this is only used on Mac OS, for the Mac-only GUI # Distutils interface (by Jack Jansen) import distutils.command + std_commands = distutils.command.__all__ is_std = {} for cmd in std_commands: @@ -780,7 +778,7 @@ Common commands: (see '--help-commands' for more) extra_commands.append(cmd) rv = [] - for cmd in (std_commands + extra_commands): + for cmd in std_commands + extra_commands: klass = self.cmdclass.get(cmd) if not klass: klass = self.get_command_class(cmd) @@ -836,7 +834,8 @@ Common commands: (see '--help-commands' for more) except AttributeError: raise DistutilsModuleError( "invalid command '%s' (no class '%s' in module '%s')" - % (command, klass_name, module_name)) + % (command, klass_name, module_name) + ) self.cmdclass[command] = klass return klass @@ -852,8 +851,10 @@ Common commands: (see '--help-commands' for more) cmd_obj = self.command_obj.get(command) if not cmd_obj and create: if DEBUG: - self.announce("Distribution.get_command_obj(): " - "creating '%s' command object" % command) + self.announce( + "Distribution.get_command_obj(): " + "creating '%s' command object" % command + ) klass = self.get_command_class(command) cmd_obj = self.command_obj[command] = klass(self) @@ -887,11 +888,9 @@ Common commands: (see '--help-commands' for more) self.announce(" setting options for '%s' command:" % command_name) for (option, (source, value)) in option_dict.items(): if DEBUG: - self.announce(" %s = %s (from %s)" % (option, value, - source)) + self.announce(" %s = %s (from %s)" % (option, value, source)) try: - bool_opts = [translate_longopt(o) - for o in command_obj.boolean_options] + bool_opts = [translate_longopt(o) for o in command_obj.boolean_options] except AttributeError: bool_opts = [] try: @@ -910,7 +909,8 @@ Common commands: (see '--help-commands' for more) else: raise DistutilsOptionError( "error in %s: command '%s' has no such option '%s'" - % (source, command_name, option)) + % (source, command_name, option) + ) except ValueError as msg: raise DistutilsOptionError(msg) @@ -934,6 +934,7 @@ Common commands: (see '--help-commands' for more) Returns the reinitialized command object. """ from distutils.cmd import Command + if not isinstance(command, Command): command_name = command command = self.get_command_obj(command_name) @@ -1010,9 +1011,11 @@ Common commands: (see '--help-commands' for more) return self.data_files and len(self.data_files) > 0 def is_pure(self): - return (self.has_pure_modules() and - not self.has_ext_modules() and - not self.has_c_libraries()) + return ( + self.has_pure_modules() + and not self.has_ext_modules() + and not self.has_c_libraries() + ) # -- Metadata query methods ---------------------------------------- @@ -1021,19 +1024,35 @@ Common commands: (see '--help-commands' for more) # to self.metadata.get_XXX. The actual code is in the # DistributionMetadata class, below. + class DistributionMetadata: """Dummy class to hold the distribution meta-data: name, version, author, and so forth. 
""" - _METHOD_BASENAMES = ("name", "version", "author", "author_email", - "maintainer", "maintainer_email", "url", - "license", "description", "long_description", - "keywords", "platforms", "fullname", "contact", - "contact_email", "classifiers", "download_url", - # PEP 314 - "provides", "requires", "obsoletes", - ) + _METHOD_BASENAMES = ( + "name", + "version", + "author", + "author_email", + "maintainer", + "maintainer_email", + "url", + "license", + "description", + "long_description", + "keywords", + "platforms", + "fullname", + "contact", + "contact_email", + "classifiers", + "download_url", + # PEP 314 + "provides", + "requires", + "obsoletes", + ) def __init__(self, path=None): if path is not None: @@ -1110,18 +1129,22 @@ class DistributionMetadata: self.obsoletes = None def write_pkg_info(self, base_dir): - """Write the PKG-INFO file into the release tree. - """ - with open(os.path.join(base_dir, 'PKG-INFO'), 'w', - encoding='UTF-8') as pkg_info: + """Write the PKG-INFO file into the release tree.""" + with open( + os.path.join(base_dir, 'PKG-INFO'), 'w', encoding='UTF-8' + ) as pkg_info: self.write_pkg_file(pkg_info) def write_pkg_file(self, file): - """Write the PKG-INFO format data to a file object. - """ + """Write the PKG-INFO format data to a file object.""" version = '1.0' - if (self.provides or self.requires or self.obsoletes or - self.classifiers or self.download_url): + if ( + self.provides + or self.requires + or self.obsoletes + or self.classifiers + or self.download_url + ): version = '1.1' # required fields @@ -1190,6 +1213,7 @@ class DistributionMetadata: def get_license(self): return self.license + get_licence = get_license def get_description(self): @@ -1225,6 +1249,7 @@ class DistributionMetadata: def set_requires(self, value): import distutils.versionpredicate + for v in value: distutils.versionpredicate.VersionPredicate(v) self.requires = list(value) @@ -1236,6 +1261,7 @@ class DistributionMetadata: value = [v.strip() for v in value] for v in value: import distutils.versionpredicate + distutils.versionpredicate.split_provision(v) self.provides = value @@ -1244,10 +1270,12 @@ class DistributionMetadata: def set_obsoletes(self, value): import distutils.versionpredicate + for v in value: distutils.versionpredicate.VersionPredicate(v) self.obsoletes = list(value) + def fix_help_options(options): """Convert a 4-tuple 'help_options' list as found in various command classes to the 3-tuple form required by FancyGetopt. diff --git a/setuptools/_distutils/errors.py b/setuptools/_distutils/errors.py index 8b93059..626254c 100644 --- a/setuptools/_distutils/errors.py +++ b/setuptools/_distutils/errors.py @@ -8,90 +8,120 @@ usually raised for errors that are obviously the end-user's fault This module is safe to use in "from ... import *" mode; it only exports symbols whose names start with "Distutils" and end with "Error".""" -class DistutilsError (Exception): + +class DistutilsError(Exception): """The root of all Distutils evil.""" + pass -class DistutilsModuleError (DistutilsError): + +class DistutilsModuleError(DistutilsError): """Unable to load an expected module, or to find an expected class within some module (in particular, command modules and classes).""" + pass -class DistutilsClassError (DistutilsError): + +class DistutilsClassError(DistutilsError): """Some command class (or possibly distribution class, if anyone feels a need to subclass Distribution) is found not to be holding up its end of the bargain, ie. 
implementing some part of the "command "interface.""" + pass -class DistutilsGetoptError (DistutilsError): + +class DistutilsGetoptError(DistutilsError): """The option table provided to 'fancy_getopt()' is bogus.""" + pass -class DistutilsArgError (DistutilsError): + +class DistutilsArgError(DistutilsError): """Raised by fancy_getopt in response to getopt.error -- ie. an error in the command line usage.""" + pass -class DistutilsFileError (DistutilsError): + +class DistutilsFileError(DistutilsError): """Any problems in the filesystem: expected file not found, etc. Typically this is for problems that we detect before OSError could be raised.""" + pass -class DistutilsOptionError (DistutilsError): + +class DistutilsOptionError(DistutilsError): """Syntactic/semantic errors in command options, such as use of mutually conflicting options, or inconsistent options, badly-spelled values, etc. No distinction is made between option values originating in the setup script, the command line, config files, or what-have-you -- but if we *know* something originated in the setup script, we'll raise DistutilsSetupError instead.""" + pass -class DistutilsSetupError (DistutilsError): + +class DistutilsSetupError(DistutilsError): """For errors that can be definitely blamed on the setup script, such as invalid keyword arguments to 'setup()'.""" + pass -class DistutilsPlatformError (DistutilsError): + +class DistutilsPlatformError(DistutilsError): """We don't know how to do something on the current platform (but we do know how to do it on some platform) -- eg. trying to compile C files on a platform not supported by a CCompiler subclass.""" + pass -class DistutilsExecError (DistutilsError): + +class DistutilsExecError(DistutilsError): """Any problems executing an external program (such as the C compiler, when compiling C files).""" + pass -class DistutilsInternalError (DistutilsError): + +class DistutilsInternalError(DistutilsError): """Internal inconsistencies or impossibilities (obviously, this should never be seen if the code is working!).""" + pass -class DistutilsTemplateError (DistutilsError): + +class DistutilsTemplateError(DistutilsError): """Syntax error in a file list template.""" + class DistutilsByteCompileError(DistutilsError): """Byte compile error.""" + # Exception classes used by the CCompiler implementation classes -class CCompilerError (Exception): +class CCompilerError(Exception): """Some compile/link operation failed.""" -class PreprocessError (CCompilerError): + +class PreprocessError(CCompilerError): """Failure to preprocess one or more C/C++ files.""" -class CompileError (CCompilerError): + +class CompileError(CCompilerError): """Failure to compile one or more C/C++ source files.""" -class LibError (CCompilerError): + +class LibError(CCompilerError): """Failure to create a static library from one or more C/C++ object files.""" -class LinkError (CCompilerError): + +class LinkError(CCompilerError): """Failure to link one or more C/C++ object files into an executable or shared library file.""" -class UnknownFileError (CCompilerError): + +class UnknownFileError(CCompilerError): """Attempt to process an unknown file type.""" diff --git a/setuptools/_distutils/extension.py b/setuptools/_distutils/extension.py index c507da3..b05c5d9 100644 --- a/setuptools/_distutils/extension.py +++ b/setuptools/_distutils/extension.py @@ -16,6 +16,7 @@ import warnings # import that large-ish module (indirectly, through distutils.core) in # order to do anything. 
+ class Extension: """Just a collection of attributes that describes an extension module and everything needed to build it (hopefully in a portable @@ -83,27 +84,29 @@ class Extension: # When adding arguments to this constructor, be sure to update # setup_keywords in core.py. - def __init__(self, name, sources, - include_dirs=None, - define_macros=None, - undef_macros=None, - library_dirs=None, - libraries=None, - runtime_library_dirs=None, - extra_objects=None, - extra_compile_args=None, - extra_link_args=None, - export_symbols=None, - swig_opts = None, - depends=None, - language=None, - optional=None, - **kw # To catch unknown keywords - ): + def __init__( + self, + name, + sources, + include_dirs=None, + define_macros=None, + undef_macros=None, + library_dirs=None, + libraries=None, + runtime_library_dirs=None, + extra_objects=None, + extra_compile_args=None, + extra_link_args=None, + export_symbols=None, + swig_opts=None, + depends=None, + language=None, + optional=None, + **kw # To catch unknown keywords + ): if not isinstance(name, str): raise AssertionError("'name' must be a string") - if not (isinstance(sources, list) and - all(isinstance(v, str) for v in sources)): + if not (isinstance(sources, list) and all(isinstance(v, str) for v in sources)): raise AssertionError("'sources' must be a list of strings") self.name = name @@ -135,13 +138,13 @@ class Extension: self.__class__.__module__, self.__class__.__qualname__, self.name, - id(self)) + id(self), + ) def read_setup_file(filename): """Reads a Setup file and returns Extension instances.""" - from distutils.sysconfig import (parse_makefile, expand_makefile_vars, - _variable_rx) + from distutils.sysconfig import parse_makefile, expand_makefile_vars, _variable_rx from distutils.text_file import TextFile from distutils.util import split_quoted @@ -151,17 +154,22 @@ def read_setup_file(filename): # Second pass to gobble up the real content: lines of the form # ... [ ...] [ ...] [ ...] - file = TextFile(filename, - strip_comments=1, skip_blanks=1, join_lines=1, - lstrip_ws=1, rstrip_ws=1) + file = TextFile( + filename, + strip_comments=1, + skip_blanks=1, + join_lines=1, + lstrip_ws=1, + rstrip_ws=1, + ) try: extensions = [] while True: line = file.readline() - if line is None: # eof + if line is None: # eof break - if _variable_rx.match(line): # VAR=VALUE, handled in first pass + if _variable_rx.match(line): # VAR=VALUE, handled in first pass continue if line[0] == line[-1] == "*": @@ -188,7 +196,8 @@ def read_setup_file(filename): continue suffix = os.path.splitext(word)[1] - switch = word[0:2] ; value = word[2:] + switch = word[0:2] + value = word[2:] if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"): # hmm, should we do something about C vs. C++ sources? @@ -199,14 +208,13 @@ def read_setup_file(filename): ext.include_dirs.append(value) elif switch == "-D": equals = value.find("=") - if equals == -1: # bare "-DFOO" -- no value + if equals == -1: # bare "-DFOO" -- no value ext.define_macros.append((value, None)) - else: # "-DFOO=blah" - ext.define_macros.append((value[0:equals], - value[equals+2:])) + else: # "-DFOO=blah" + ext.define_macros.append((value[0:equals], value[equals + 2 :])) elif switch == "-U": ext.undef_macros.append(value) - elif switch == "-C": # only here 'cause makesetup has it! + elif switch == "-C": # only here 'cause makesetup has it! 
ext.extra_compile_args.append(word) elif switch == "-l": ext.libraries.append(value) diff --git a/setuptools/_distutils/fancy_getopt.py b/setuptools/_distutils/fancy_getopt.py index 7d170dd..b9b2176 100644 --- a/setuptools/_distutils/fancy_getopt.py +++ b/setuptools/_distutils/fancy_getopt.py @@ -26,6 +26,7 @@ neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat)) # (for use as attributes of some object). longopt_xlate = str.maketrans('-', '_') + class FancyGetopt: """Wrapper around the standard 'getopt()' module that provides some handy extra functionality: @@ -90,7 +91,8 @@ class FancyGetopt: def add_option(self, long_option, short_option=None, help_string=None): if long_option in self.option_index: raise DistutilsGetoptError( - "option conflict: already an option '%s'" % long_option) + "option conflict: already an option '%s'" % long_option + ) else: option = (long_option, short_option, help_string) self.option_table.append(option) @@ -111,11 +113,15 @@ class FancyGetopt: assert isinstance(aliases, dict) for (alias, opt) in aliases.items(): if alias not in self.option_index: - raise DistutilsGetoptError(("invalid %s '%s': " - "option '%s' not defined") % (what, alias, alias)) + raise DistutilsGetoptError( + ("invalid %s '%s': " "option '%s' not defined") + % (what, alias, alias) + ) if opt not in self.option_index: - raise DistutilsGetoptError(("invalid %s '%s': " - "aliased option '%s' not defined") % (what, alias, opt)) + raise DistutilsGetoptError( + ("invalid %s '%s': " "aliased option '%s' not defined") + % (what, alias, opt) + ) def set_aliases(self, alias): """Set the aliases for this option parser.""" @@ -153,19 +159,23 @@ class FancyGetopt: # Type- and value-check the option names if not isinstance(long, str) or len(long) < 2: - raise DistutilsGetoptError(("invalid long option '%s': " - "must be a string of length >= 2") % long) + raise DistutilsGetoptError( + ("invalid long option '%s': " "must be a string of length >= 2") + % long + ) - if (not ((short is None) or - (isinstance(short, str) and len(short) == 1))): - raise DistutilsGetoptError("invalid short option '%s': " - "must a single character or None" % short) + if not ((short is None) or (isinstance(short, str) and len(short) == 1)): + raise DistutilsGetoptError( + "invalid short option '%s': " + "must a single character or None" % short + ) self.repeat[long] = repeat self.long_opts.append(long) - if long[-1] == '=': # option takes an argument? - if short: short = short + ':' + if long[-1] == '=': # option takes an argument? + if short: + short = short + ':' long = long[0:-1] self.takes_arg[long] = 1 else: @@ -175,11 +185,11 @@ class FancyGetopt: if alias_to is not None: if self.takes_arg[alias_to]: raise DistutilsGetoptError( - "invalid negative alias '%s': " - "aliased option '%s' takes a value" - % (long, alias_to)) + "invalid negative alias '%s': " + "aliased option '%s' takes a value" % (long, alias_to) + ) - self.long_opts[-1] = long # XXX redundant?! + self.long_opts[-1] = long # XXX redundant?! 
self.takes_arg[long] = 0 # If this is an alias option, make sure its "takes arg" flag is @@ -188,10 +198,10 @@ class FancyGetopt: if alias_to is not None: if self.takes_arg[long] != self.takes_arg[alias_to]: raise DistutilsGetoptError( - "invalid alias '%s': inconsistent with " - "aliased option '%s' (one of them takes a value, " - "the other doesn't" - % (long, alias_to)) + "invalid alias '%s': inconsistent with " + "aliased option '%s' (one of them takes a value, " + "the other doesn't" % (long, alias_to) + ) # Now enforce some bondage on the long option name, so we can # later translate it to an attribute name on some object. Have @@ -199,8 +209,9 @@ class FancyGetopt: # '='. if not longopt_re.match(long): raise DistutilsGetoptError( - "invalid long option name '%s' " - "(must be letters, numbers, hyphens only" % long) + "invalid long option name '%s' " + "(must be letters, numbers, hyphens only" % long + ) self.attr_name[long] = self.get_attr_name(long) if short: @@ -235,7 +246,7 @@ class FancyGetopt: raise DistutilsArgError(msg) for opt, val in opts: - if len(opt) == 2 and opt[0] == '-': # it's a short option + if len(opt) == 2 and opt[0] == '-': # it's a short option opt = self.short2long[opt[1]] else: assert len(opt) > 2 and opt[:2] == '--' @@ -245,7 +256,7 @@ class FancyGetopt: if alias: opt = alias - if not self.takes_arg[opt]: # boolean option? + if not self.takes_arg[opt]: # boolean option? assert val == '', "boolean option can't have value" alias = self.negative_alias.get(opt) if alias: @@ -294,11 +305,11 @@ class FancyGetopt: if long[-1] == '=': l = l - 1 if short is not None: - l = l + 5 # " (-x)" where short == 'x' + l = l + 5 # " (-x)" where short == 'x' if l > max_opt: max_opt = l - opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter + opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter # Typical help block looks like this: # --foo controls foonabulation @@ -348,8 +359,7 @@ class FancyGetopt: else: opt_names = "%s (-%s)" % (long, short) if text: - lines.append(" --%-*s %s" % - (max_opt, opt_names, text[0])) + lines.append(" --%-*s %s" % (max_opt, opt_names, text[0])) else: lines.append(" --%-*s" % opt_names) @@ -370,7 +380,8 @@ def fancy_getopt(options, negative_opt, object, args): return parser.getopt(args, object) -WS_TRANS = {ord(_wschar) : ' ' for _wschar in string.whitespace} +WS_TRANS = {ord(_wschar): ' ' for _wschar in string.whitespace} + def wrap_text(text, width): """wrap_text(text : string, width : int) -> [string] @@ -386,26 +397,26 @@ def wrap_text(text, width): text = text.expandtabs() text = text.translate(WS_TRANS) chunks = re.split(r'( +|-+)', text) - chunks = [ch for ch in chunks if ch] # ' - ' results in empty strings + chunks = [ch for ch in chunks if ch] # ' - ' results in empty strings lines = [] while chunks: - cur_line = [] # list of chunks (to-be-joined) - cur_len = 0 # length of current line + cur_line = [] # list of chunks (to-be-joined) + cur_len = 0 # length of current line while chunks: l = len(chunks[0]) - if cur_len + l <= width: # can squeeze (at least) this chunk in + if cur_len + l <= width: # can squeeze (at least) this chunk in cur_line.append(chunks[0]) del chunks[0] cur_len = cur_len + l - else: # this line is full + else: # this line is full # drop last chunk if all space if cur_line and cur_line[-1][0] == ' ': del cur_line[-1] break - if chunks: # any chunks left to process? + if chunks: # any chunks left to process? 
# if the current line is still empty, then we had a single # chunk that's too big too fit on a line -- so we break # down and break it up at the line width diff --git a/setuptools/_distutils/file_util.py b/setuptools/_distutils/file_util.py index b3fee35..4ff2230 100644 --- a/setuptools/_distutils/file_util.py +++ b/setuptools/_distutils/file_util.py @@ -8,12 +8,10 @@ from distutils.errors import DistutilsFileError from distutils import log # for generating verbose output in 'copy_file()' -_copy_action = { None: 'copying', - 'hard': 'hard linking', - 'sym': 'symbolically linking' } +_copy_action = {None: 'copying', 'hard': 'hard linking', 'sym': 'symbolically linking'} -def _copy_file_contents(src, dst, buffer_size=16*1024): +def _copy_file_contents(src, dst, buffer_size=16 * 1024): """Copy the file 'src' to 'dst'; both must be filenames. Any error opening either file, reading from 'src', or writing to 'dst', raises DistutilsFileError. Data is read/written in chunks of 'buffer_size' @@ -35,20 +33,21 @@ def _copy_file_contents(src, dst, buffer_size=16*1024): os.unlink(dst) except OSError as e: raise DistutilsFileError( - "could not delete '%s': %s" % (dst, e.strerror)) + "could not delete '%s': %s" % (dst, e.strerror) + ) try: fdst = open(dst, 'wb') except OSError as e: - raise DistutilsFileError( - "could not create '%s': %s" % (dst, e.strerror)) + raise DistutilsFileError("could not create '%s': %s" % (dst, e.strerror)) while True: try: buf = fsrc.read(buffer_size) except OSError as e: raise DistutilsFileError( - "could not read from '%s': %s" % (src, e.strerror)) + "could not read from '%s': %s" % (src, e.strerror) + ) if not buf: break @@ -57,15 +56,25 @@ def _copy_file_contents(src, dst, buffer_size=16*1024): fdst.write(buf) except OSError as e: raise DistutilsFileError( - "could not write to '%s': %s" % (dst, e.strerror)) + "could not write to '%s': %s" % (dst, e.strerror) + ) finally: if fdst: fdst.close() if fsrc: fsrc.close() -def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, - link=None, verbose=1, dry_run=0): + +def copy_file( + src, + dst, + preserve_mode=1, + preserve_times=1, + update=0, + link=None, + verbose=1, + dry_run=0, +): """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is copied there with the same name; otherwise, it must be a filename. (If the file exists, it will be ruthlessly clobbered.) If 'preserve_mode' @@ -102,7 +111,8 @@ def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, if not os.path.isfile(src): raise DistutilsFileError( - "can't copy '%s': doesn't exist or not a regular file" % src) + "can't copy '%s': doesn't exist or not a regular file" % src + ) if os.path.isdir(dst): dir = dst @@ -163,9 +173,7 @@ def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, # XXX I suspect this is Unix-specific -- need porting help! -def move_file (src, dst, - verbose=1, - dry_run=0): +def move_file(src, dst, verbose=1, dry_run=0): """Move a file 'src' to 'dst'. 
If 'dst' is a directory, the file will be moved into it with the same name; otherwise, 'src' is just renamed @@ -190,13 +198,13 @@ def move_file (src, dst, dst = os.path.join(dst, basename(src)) elif exists(dst): raise DistutilsFileError( - "can't move '%s': destination '%s' already exists" % - (src, dst)) + "can't move '%s': destination '%s' already exists" % (src, dst) + ) if not isdir(dirname(dst)): raise DistutilsFileError( - "can't move '%s': destination '%s' not a valid path" % - (src, dst)) + "can't move '%s': destination '%s' not a valid path" % (src, dst) + ) copy_it = False try: @@ -206,8 +214,7 @@ def move_file (src, dst, if num == errno.EXDEV: copy_it = True else: - raise DistutilsFileError( - "couldn't move '%s' to '%s': %s" % (src, dst, msg)) + raise DistutilsFileError("couldn't move '%s' to '%s': %s" % (src, dst, msg)) if copy_it: copy_file(src, dst, verbose=verbose) @@ -220,13 +227,13 @@ def move_file (src, dst, except OSError: pass raise DistutilsFileError( - "couldn't move '%s' to '%s' by copy/delete: " - "delete '%s' failed: %s" - % (src, dst, src, msg)) + "couldn't move '%s' to '%s' by copy/delete: " + "delete '%s' failed: %s" % (src, dst, src, msg) + ) return dst -def write_file (filename, contents): +def write_file(filename, contents): """Create a file with the specified name and write 'contents' (a sequence of strings without line terminators) to it. """ diff --git a/setuptools/_distutils/filelist.py b/setuptools/_distutils/filelist.py index 82a7738..37ab341 100644 --- a/setuptools/_distutils/filelist.py +++ b/setuptools/_distutils/filelist.py @@ -46,6 +46,7 @@ class FileList: DISTUTILS_DEBUG environment variable) flag is true. """ from distutils.debug import DEBUG + if DEBUG: print(msg) @@ -80,22 +81,24 @@ class FileList: patterns = dir = dir_pattern = None - if action in ('include', 'exclude', - 'global-include', 'global-exclude'): + if action in ('include', 'exclude', 'global-include', 'global-exclude'): if len(words) < 2: raise DistutilsTemplateError( - "'%s' expects ..." % action) + "'%s' expects ..." % action + ) patterns = [convert_path(w) for w in words[1:]] elif action in ('recursive-include', 'recursive-exclude'): if len(words) < 3: raise DistutilsTemplateError( - "'%s' expects ..." % action) + "'%s' expects ..." 
% action + ) dir = convert_path(words[1]) patterns = [convert_path(w) for w in words[2:]] elif action in ('graft', 'prune'): if len(words) != 2: raise DistutilsTemplateError( - "'%s' expects a single " % action) + "'%s' expects a single " % action + ) dir_pattern = convert_path(words[1]) else: raise DistutilsTemplateError("unknown action '%s'" % action) @@ -117,65 +120,82 @@ class FileList: self.debug_print("include " + ' '.join(patterns)) for pattern in patterns: if not self.include_pattern(pattern, anchor=1): - log.warn("warning: no files found matching '%s'", - pattern) + log.warn("warning: no files found matching '%s'", pattern) elif action == 'exclude': self.debug_print("exclude " + ' '.join(patterns)) for pattern in patterns: if not self.exclude_pattern(pattern, anchor=1): - log.warn(("warning: no previously-included files " - "found matching '%s'"), pattern) + log.warn( + ( + "warning: no previously-included files " + "found matching '%s'" + ), + pattern, + ) elif action == 'global-include': self.debug_print("global-include " + ' '.join(patterns)) for pattern in patterns: if not self.include_pattern(pattern, anchor=0): - log.warn(("warning: no files found matching '%s' " - "anywhere in distribution"), pattern) + log.warn( + ( + "warning: no files found matching '%s' " + "anywhere in distribution" + ), + pattern, + ) elif action == 'global-exclude': self.debug_print("global-exclude " + ' '.join(patterns)) for pattern in patterns: if not self.exclude_pattern(pattern, anchor=0): - log.warn(("warning: no previously-included files matching " - "'%s' found anywhere in distribution"), - pattern) + log.warn( + ( + "warning: no previously-included files matching " + "'%s' found anywhere in distribution" + ), + pattern, + ) elif action == 'recursive-include': - self.debug_print("recursive-include %s %s" % - (dir, ' '.join(patterns))) + self.debug_print("recursive-include %s %s" % (dir, ' '.join(patterns))) for pattern in patterns: if not self.include_pattern(pattern, prefix=dir): msg = ( - "warning: no files found matching '%s' " - "under directory '%s'" + "warning: no files found matching '%s' " "under directory '%s'" ) log.warn(msg, pattern, dir) elif action == 'recursive-exclude': - self.debug_print("recursive-exclude %s %s" % - (dir, ' '.join(patterns))) + self.debug_print("recursive-exclude %s %s" % (dir, ' '.join(patterns))) for pattern in patterns: if not self.exclude_pattern(pattern, prefix=dir): - log.warn(("warning: no previously-included files matching " - "'%s' found under directory '%s'"), - pattern, dir) + log.warn( + ( + "warning: no previously-included files matching " + "'%s' found under directory '%s'" + ), + pattern, + dir, + ) elif action == 'graft': self.debug_print("graft " + dir_pattern) if not self.include_pattern(None, prefix=dir_pattern): - log.warn("warning: no directories found matching '%s'", - dir_pattern) + log.warn("warning: no directories found matching '%s'", dir_pattern) elif action == 'prune': self.debug_print("prune " + dir_pattern) if not self.exclude_pattern(None, prefix=dir_pattern): - log.warn(("no previously-included directories found " - "matching '%s'"), dir_pattern) + log.warn( + ("no previously-included directories found " "matching '%s'"), + dir_pattern, + ) else: raise DistutilsInternalError( - "this cannot happen: invalid action '%s'" % action) + "this cannot happen: invalid action '%s'" % action + ) # Filtering/selection methods @@ -207,8 +227,7 @@ class FileList: # XXX docstring lying about what the special chars are? 
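# --- Editor's illustration (not part of the upstream patch) -----------------
# The reformatted include_pattern()/exclude_pattern() methods still evaluate
# MANIFEST.in-style globs against a pre-populated 'allfiles' list.  A small
# sketch of that behaviour, using invented file names:
from distutils.filelist import FileList

fl = FileList()
fl.allfiles = ["README.rst", "src/pkg/__init__.py", "src/pkg/data.json"]

fl.include_pattern("*.py", anchor=0)    # like "global-include *.py"
fl.exclude_pattern("*.json", anchor=0)  # like "global-exclude *.json"
print(fl.files)                         # -> ['src/pkg/__init__.py']
# -----------------------------------------------------------------------------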
files_found = False pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) - self.debug_print("include_pattern: applying regex r'%s'" % - pattern_re.pattern) + self.debug_print("include_pattern: applying regex r'%s'" % pattern_re.pattern) # delayed loading of allfiles list if self.allfiles is None: @@ -221,8 +240,7 @@ class FileList: files_found = True return files_found - def exclude_pattern( - self, pattern, anchor=1, prefix=None, is_regex=0): + def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): """Remove strings (presumably filenames) from 'files' that match 'pattern'. Other parameters are the same as for 'include_pattern()', above. @@ -231,9 +249,8 @@ class FileList: """ files_found = False pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) - self.debug_print("exclude_pattern: applying regex r'%s'" % - pattern_re.pattern) - for i in range(len(self.files)-1, -1, -1): + self.debug_print("exclude_pattern: applying regex r'%s'" % pattern_re.pattern) + for i in range(len(self.files) - 1, -1, -1): if pattern_re.search(self.files[i]): self.debug_print(" removing " + self.files[i]) del self.files[i] @@ -243,15 +260,14 @@ class FileList: # Utility functions + def _find_all_simple(path): """ Find all files under 'path' """ all_unique = _UniqueDirs.filter(os.walk(path, followlinks=True)) results = ( - os.path.join(base, file) - for base, dirs, files in all_unique - for file in files + os.path.join(base, file) for base, dirs, files in all_unique for file in files ) return filter(os.path.isfile, results) @@ -262,6 +278,7 @@ class _UniqueDirs(set): avoiding infinite recursion. Ref https://bugs.python.org/issue44497. """ + def __call__(self, walk_item): """ Given an item from an os.walk result, determine @@ -341,15 +358,14 @@ def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0): if prefix is not None: prefix_re = glob_to_re(prefix) assert prefix_re.startswith(start) and prefix_re.endswith(end) - prefix_re = prefix_re[len(start): len(prefix_re) - len(end)] + prefix_re = prefix_re[len(start) : len(prefix_re) - len(end)] sep = os.sep if os.sep == '\\': sep = r'\\' - pattern_re = pattern_re[len(start): len(pattern_re) - len(end)] - pattern_re = r'%s\A%s%s.*%s%s' % ( - start, prefix_re, sep, pattern_re, end) - else: # no prefix -- respect anchor flag + pattern_re = pattern_re[len(start) : len(pattern_re) - len(end)] + pattern_re = r'%s\A%s%s.*%s%s' % (start, prefix_re, sep, pattern_re, end) + else: # no prefix -- respect anchor flag if anchor: - pattern_re = r'%s\A%s' % (start, pattern_re[len(start):]) + pattern_re = r'%s\A%s' % (start, pattern_re[len(start) :]) return re.compile(pattern_re) diff --git a/setuptools/_distutils/log.py b/setuptools/_distutils/log.py index a68b156..be25f6c 100644 --- a/setuptools/_distutils/log.py +++ b/setuptools/_distutils/log.py @@ -13,7 +13,6 @@ FATAL = 5 class Log: - def __init__(self, threshold=WARN): self.threshold = threshold diff --git a/setuptools/_distutils/msvc9compiler.py b/setuptools/_distutils/msvc9compiler.py index 6b62738..225f1a2 100644 --- a/setuptools/_distutils/msvc9compiler.py +++ b/setuptools/_distutils/msvc9compiler.py @@ -17,8 +17,13 @@ import subprocess import sys import re -from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError +from distutils.errors import ( + DistutilsExecError, + DistutilsPlatformError, + CompileError, + LibError, + LinkError, +) from distutils.ccompiler import CCompiler, gen_lib_options from distutils import log 
from distutils.util import get_platform @@ -30,12 +35,14 @@ RegEnumKey = winreg.EnumKey RegEnumValue = winreg.EnumValue RegError = winreg.error -HKEYS = (winreg.HKEY_USERS, - winreg.HKEY_CURRENT_USER, - winreg.HKEY_LOCAL_MACHINE, - winreg.HKEY_CLASSES_ROOT) +HKEYS = ( + winreg.HKEY_USERS, + winreg.HKEY_CURRENT_USER, + winreg.HKEY_LOCAL_MACHINE, + winreg.HKEY_CLASSES_ROOT, +) -NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32) +NATIVE_WIN64 = sys.platform == 'win32' and sys.maxsize > 2**32 if NATIVE_WIN64: # Visual C++ is a 32-bit application, so we need to look in # the corresponding registry branch, if we're running a @@ -52,13 +59,13 @@ else: # 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is # the param to cross-compile on x86 targeting amd64.) PLAT_TO_VCVARS = { - 'win32' : 'x86', - 'win-amd64' : 'amd64', + 'win32': 'x86', + 'win-amd64': 'amd64', } + class Reg: - """Helper class to read values from the registry - """ + """Helper class to read values from the registry""" def get_value(cls, path, key): for base in HKEYS: @@ -66,6 +73,7 @@ class Reg: if d and key in d: return d[key] raise KeyError(key) + get_value = classmethod(get_value) def read_keys(cls, base, key): @@ -84,6 +92,7 @@ class Reg: L.append(k) i += 1 return L + read_keys = classmethod(read_keys) def read_values(cls, base, key): @@ -106,6 +115,7 @@ class Reg: d[cls.convert_mbcs(name)] = cls.convert_mbcs(value) i += 1 return d + read_values = classmethod(read_values) def convert_mbcs(s): @@ -116,10 +126,11 @@ class Reg: except UnicodeError: pass return s + convert_mbcs = staticmethod(convert_mbcs) -class MacroExpander: +class MacroExpander: def __init__(self, version): self.macros = {} self.vsbase = VS_BASE % version @@ -134,16 +145,16 @@ class MacroExpander: self.set_macro("FrameworkDir", NET_BASE, "installroot") try: if version >= 8.0: - self.set_macro("FrameworkSDKDir", NET_BASE, - "sdkinstallrootv2.0") + self.set_macro("FrameworkSDKDir", NET_BASE, "sdkinstallrootv2.0") else: raise KeyError("sdkinstallrootv2.0") except KeyError: raise DistutilsPlatformError( - """Python was built with Visual Studio 2008; + """Python was built with Visual Studio 2008; extensions must be built with a compiler than can generate compatible binaries. Visual Studio 2008 was not found on this system. If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") +you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""" + ) if version >= 9.0: self.set_macro("FrameworkVersion", self.vsbase, "clr version") @@ -164,6 +175,7 @@ you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") s = s.replace(k, v) return s + def get_build_version(): """Return the version of MSVC that was used to build Python. @@ -189,6 +201,7 @@ def get_build_version(): # else we don't know what version of the compiler this is return None + def normalize_and_reduce_paths(paths): """Return a list of normalized paths with duplicates removed. @@ -203,9 +216,9 @@ def normalize_and_reduce_paths(paths): reduced_paths.append(np) return reduced_paths + def removeDuplicates(variable): - """Remove duplicate values of an environment variable. 
- """ + """Remove duplicate values of an environment variable.""" oldList = variable.split(os.pathsep) newList = [] for i in oldList: @@ -214,6 +227,7 @@ def removeDuplicates(variable): newVariable = os.pathsep.join(newList) return newVariable + def find_vcvarsall(version): """Find the vcvarsall.bat file @@ -222,8 +236,7 @@ def find_vcvarsall(version): """ vsbase = VS_BASE % version try: - productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, - "productdir") + productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, "productdir") except KeyError: log.debug("Unable to find productdir in registry") productdir = None @@ -249,9 +262,9 @@ def find_vcvarsall(version): log.debug("Unable to find vcvarsall.bat") return None + def query_vcvarsall(version, arch="x86"): - """Launch vcvarsall.bat and read the settings from its environment - """ + """Launch vcvarsall.bat and read the settings from its environment""" vcvarsall = find_vcvarsall(version) interesting = {"include", "lib", "libpath", "path"} result = {} @@ -259,9 +272,11 @@ def query_vcvarsall(version, arch="x86"): if vcvarsall is None: raise DistutilsPlatformError("Unable to find vcvarsall.bat") log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version) - popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch), - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + popen = subprocess.Popen( + '"%s" %s & set' % (vcvarsall, arch), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) try: stdout, stderr = popen.communicate() if popen.wait() != 0: @@ -289,13 +304,15 @@ def query_vcvarsall(version, arch="x86"): return result + # More globals VERSION = get_build_version() # MACROS = MacroExpander(VERSION) -class MSVCCompiler(CCompiler) : + +class MSVCCompiler(CCompiler): """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" + as defined by the CCompiler abstract class.""" compiler_type = 'msvc' @@ -314,8 +331,7 @@ class MSVCCompiler(CCompiler) : # Needed for the filename generation methods provided by the # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) + src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions res_extension = '.res' obj_extension = '.obj' static_lib_extension = '.lib' @@ -331,23 +347,28 @@ class MSVCCompiler(CCompiler) : self.__paths = [] # target platform (.plat_name is consistent with 'bdist') self.plat_name = None - self.__arch = None # deprecated name + self.__arch = None # deprecated name self.initialized = False def initialize(self, plat_name=None): # multi-init means we would need to check platform same each time... assert not self.initialized, "don't init multiple times" if self.__version < 8.0: - raise DistutilsPlatformError("VC %0.1f is not supported by this module" % self.__version) + raise DistutilsPlatformError( + "VC %0.1f is not supported by this module" % self.__version + ) if plat_name is None: plat_name = get_platform() # sanity check for platforms to prevent obscure errors later. 
ok_plats = 'win32', 'win-amd64' if plat_name not in ok_plats: - raise DistutilsPlatformError("--plat-name must be one of %s" % - (ok_plats,)) + raise DistutilsPlatformError("--plat-name must be one of %s" % (ok_plats,)) - if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): + if ( + "DISTUTILS_USE_SDK" in os.environ + and "MSSdk" in os.environ + and self.find_exe("cl.exe") + ): # Assume that the SDK set up everything alright; don't try to be # smarter self.cc = "cl.exe" @@ -365,8 +386,9 @@ class MSVCCompiler(CCompiler) : plat_spec = PLAT_TO_VCVARS[plat_name] else: # cross compile from win32 -> some 64bit - plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \ - PLAT_TO_VCVARS[plat_name] + plat_spec = ( + PLAT_TO_VCVARS[get_platform()] + '_' + PLAT_TO_VCVARS[plat_name] + ) vc_env = query_vcvarsall(VERSION, plat_spec) @@ -375,18 +397,19 @@ class MSVCCompiler(CCompiler) : os.environ['include'] = vc_env['include'] if len(self.__paths) == 0: - raise DistutilsPlatformError("Python was built with %s, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." - % self.__product) + raise DistutilsPlatformError( + "Python was built with %s, " + "and extensions need to be built with the same " + "version of the compiler, but it isn't installed." % self.__product + ) self.cc = self.find_exe("cl.exe") self.linker = self.find_exe("link.exe") self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler - #self.set_path_env_var('lib') - #self.set_path_env_var('include') + self.rc = self.find_exe("rc.exe") # resource compiler + self.mc = self.find_exe("mc.exe") # message compiler + # self.set_path_env_var('lib') + # self.set_path_env_var('include') # extend the MSVC path with the current path try: @@ -399,71 +422,83 @@ class MSVCCompiler(CCompiler) : self.preprocess_options = None if self.__arch == "x86": - self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', - '/Z7', '/D_DEBUG'] + self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/DNDEBUG'] + self.compile_options_debug = [ + '/nologo', + '/Od', + '/MDd', + '/W3', + '/Z7', + '/D_DEBUG', + ] else: # Win64 - self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GS-' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', - '/Z7', '/D_DEBUG'] + self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG'] + self.compile_options_debug = [ + '/nologo', + '/Od', + '/MDd', + '/W3', + '/GS-', + '/Z7', + '/D_DEBUG', + ] self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] if self.__version >= 7: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' - ] - self.ldflags_static = [ '/nologo'] + self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'] + self.ldflags_static = ['/nologo'] self.initialized = True # -- Worker methods ------------------------------------------------ - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): + def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): # Copied from ccompiler.py, extended to return .res as 'object'-file # for .rc input file - if output_dir is None: output_dir = '' + if output_dir is None: + output_dir = '' obj_names = [] for src_name in source_filenames: - (base, ext) = os.path.splitext (src_name) - base = 
os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / + (base, ext) = os.path.splitext(src_name) + base = os.path.splitdrive(base)[1] # Chop off the drive + base = base[os.path.isabs(base) :] # If abs, chop off leading / if ext not in self.src_extensions: # Better to raise an exception instead of silently continuing # and later complain about sources and targets having # different lengths - raise CompileError ("Don't know how to compile %s" % src_name) + raise CompileError("Don't know how to compile %s" % src_name) if strip_dir: - base = os.path.basename (base) + base = os.path.basename(base) if ext in self._rc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) + obj_names.append(os.path.join(output_dir, base + self.res_extension)) elif ext in self._mc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) + obj_names.append(os.path.join(output_dir, base + self.res_extension)) else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) + obj_names.append(os.path.join(output_dir, base + self.obj_extension)) return obj_names - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): + def compile( + self, + sources, + output_dir=None, + macros=None, + include_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + depends=None, + ): if not self.initialized: self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) + compile_info = self._setup_compile( + output_dir, macros, include_dirs, sources, depends, extra_postargs + ) macros, objects, extra_postargs, pp_opts, build = compile_info compile_opts = extra_preargs or [] - compile_opts.append ('/c') + compile_opts.append('/c') if debug: compile_opts.extend(self.compile_options_debug) else: @@ -489,8 +524,7 @@ class MSVCCompiler(CCompiler) : input_opt = src output_opt = "/fo" + obj try: - self.spawn([self.rc] + pp_opts + - [output_opt] + [input_opt]) + self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt]) except DistutilsExecError as msg: raise CompileError(msg) continue @@ -510,50 +544,46 @@ class MSVCCompiler(CCompiler) : rc_dir = os.path.dirname(obj) try: # first compile .MC to .RC and .H file - self.spawn([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext (os.path.basename (src)) - rc_file = os.path.join (rc_dir, base + '.rc') + self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src]) + base, _ = os.path.splitext(os.path.basename(src)) + rc_file = os.path.join(rc_dir, base + '.rc') # then compile .RC to .RES file - self.spawn([self.rc] + - ["/fo" + obj] + [rc_file]) + self.spawn([self.rc] + ["/fo" + obj] + [rc_file]) except DistutilsExecError as msg: raise CompileError(msg) continue else: # how to handle this file? 
- raise CompileError("Don't know how to compile %s to %s" - % (src, obj)) + raise CompileError("Don't know how to compile %s to %s" % (src, obj)) output_opt = "/Fo" + obj try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) + self.spawn( + [self.cc] + + compile_opts + + pp_opts + + [input_opt, output_opt] + + extra_postargs + ) except DistutilsExecError as msg: raise CompileError(msg) return objects - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): + def create_static_lib( + self, objects, output_libname, output_dir=None, debug=0, target_lang=None + ): if not self.initialized: self.initialize() (objects, output_dir) = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) + output_filename = self.library_filename(output_libname, output_dir=output_dir) if self._need_link(objects, output_filename): lib_args = objects + ['/OUT:' + output_filename] if debug: - pass # XXX what goes here? + pass # XXX what goes here? try: self.spawn([self.lib] + lib_args) except DistutilsExecError as msg: @@ -561,36 +591,36 @@ class MSVCCompiler(CCompiler) : else: log.debug("skipping %s (up-to-date)", output_filename) - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): + def link( + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): if not self.initialized: self.initialize() (objects, output_dir) = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) + fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) (libraries, library_dirs, runtime_library_dirs) = fixed_args if runtime_library_dirs: - self.warn ("I don't know what to do with 'runtime_library_dirs': " - + str (runtime_library_dirs)) + self.warn( + "I don't know what to do with 'runtime_library_dirs': " + + str(runtime_library_dirs) + ) - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) + lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) if output_dir is not None: output_filename = os.path.join(output_dir, output_filename) @@ -607,11 +637,12 @@ class MSVCCompiler(CCompiler) : ldflags = self.ldflags_shared export_opts = [] - for sym in (export_symbols or []): + for sym in export_symbols or []: export_opts.append("/EXPORT:" + sym) - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) + ld_args = ( + ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename] + ) # The MSVC linker generates .lib and .exp files, which cannot be # suppressed by any linker switches. 
The .lib files may even be @@ -621,11 +652,10 @@ class MSVCCompiler(CCompiler) : build_temp = os.path.dirname(objects[0]) if export_symbols is not None: (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - build_temp, - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) + os.path.basename(output_filename) + ) + implib_file = os.path.join(build_temp, self.library_filename(dll_name)) + ld_args.append('/IMPLIB:' + implib_file) self.manifest_setup_ldargs(output_filename, build_temp, ld_args) @@ -650,8 +680,7 @@ class MSVCCompiler(CCompiler) : mffilename, mfid = mfinfo out_arg = '-outputresource:%s;%s' % (output_filename, mfid) try: - self.spawn(['mt.exe', '-nologo', '-manifest', - mffilename, out_arg]) + self.spawn(['mt.exe', '-nologo', '-manifest', mffilename, out_arg]) except DistutilsExecError as msg: raise LinkError(msg) else: @@ -665,8 +694,8 @@ class MSVCCompiler(CCompiler) : # Ask the linker to generate the manifest in the temp dir, so # we can check it, and possibly embed it, later. temp_manifest = os.path.join( - build_temp, - os.path.basename(output_filename) + ".manifest") + build_temp, os.path.basename(output_filename) + ".manifest" + ) ld_args.append('/MANIFESTFILE:' + temp_manifest) def manifest_get_embed_info(self, target_desc, ld_args): @@ -709,9 +738,10 @@ class MSVCCompiler(CCompiler) : finally: manifest_f.close() pattern = re.compile( - r"""|)""", - re.DOTALL) + re.DOTALL, + ) manifest_buf = re.sub(pattern, "", manifest_buf) pattern = r"\s*" manifest_buf = re.sub(pattern, "", manifest_buf) @@ -719,7 +749,9 @@ class MSVCCompiler(CCompiler) : # don't want a manifest embedded. pattern = re.compile( r"""|)""", re.DOTALL) + r""".*?(?:/>|)""", + re.DOTALL, + ) if re.search(pattern, manifest_buf) is None: return None @@ -741,12 +773,12 @@ class MSVCCompiler(CCompiler) : def runtime_library_dir_option(self, dir): raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC++") + "don't know how to set runtime library search path for MSVC++" + ) def library_option(self, lib): return self.library_filename(lib) - def find_library_file(self, dirs, lib, debug=0): # Prefer a debugging library if found (and requested), but deal # with it if we don't have one. @@ -756,7 +788,7 @@ class MSVCCompiler(CCompiler) : try_names = [lib] for dir in dirs: for name in try_names: - libfile = os.path.join(dir, self.library_filename (name)) + libfile = os.path.join(dir, self.library_filename(name)) if os.path.exists(libfile): return libfile else: @@ -781,7 +813,7 @@ class MSVCCompiler(CCompiler) : # didn't find it; try existing path for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p),exe) + fn = os.path.join(os.path.abspath(p), exe) if os.path.isfile(fn): return fn diff --git a/setuptools/_distutils/msvccompiler.py b/setuptools/_distutils/msvccompiler.py index e1367b8..00c630b 100644 --- a/setuptools/_distutils/msvccompiler.py +++ b/setuptools/_distutils/msvccompiler.py @@ -9,11 +9,14 @@ for the Microsoft Visual Studio. 
# finding DevStudio (through the registry) import sys, os -from distutils.errors import \ - DistutilsExecError, DistutilsPlatformError, \ - CompileError, LibError, LinkError -from distutils.ccompiler import \ - CCompiler, gen_lib_options +from distutils.errors import ( + DistutilsExecError, + DistutilsPlatformError, + CompileError, + LibError, + LinkError, +) +from distutils.ccompiler import CCompiler, gen_lib_options from distutils import log _can_read_reg = False @@ -32,6 +35,7 @@ except ImportError: try: import win32api import win32con + _can_read_reg = True hkey_mod = win32con @@ -40,17 +44,22 @@ except ImportError: RegEnumValue = win32api.RegEnumValue RegError = win32api.error except ImportError: - log.info("Warning: Can't read registry to find the " - "necessary compiler setting\n" - "Make sure that Python modules winreg, " - "win32api or win32con are installed.") + log.info( + "Warning: Can't read registry to find the " + "necessary compiler setting\n" + "Make sure that Python modules winreg, " + "win32api or win32con are installed." + ) pass if _can_read_reg: - HKEYS = (hkey_mod.HKEY_USERS, - hkey_mod.HKEY_CURRENT_USER, - hkey_mod.HKEY_LOCAL_MACHINE, - hkey_mod.HKEY_CLASSES_ROOT) + HKEYS = ( + hkey_mod.HKEY_USERS, + hkey_mod.HKEY_CURRENT_USER, + hkey_mod.HKEY_LOCAL_MACHINE, + hkey_mod.HKEY_CLASSES_ROOT, + ) + def read_keys(base, key): """Return list of registry keys.""" @@ -69,6 +78,7 @@ def read_keys(base, key): i += 1 return L + def read_values(base, key): """Return dict of registry keys and values. @@ -90,6 +100,7 @@ def read_values(base, key): i += 1 return d + def convert_mbcs(s): dec = getattr(s, "decode", None) if dec is not None: @@ -99,6 +110,7 @@ def convert_mbcs(s): pass return s + class MacroExpander: def __init__(self, version): self.macros = {} @@ -122,12 +134,13 @@ class MacroExpander: self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1") else: self.set_macro("FrameworkSDKDir", net, "sdkinstallroot") - except KeyError as exc: # + except KeyError as exc: # raise DistutilsPlatformError( - """Python was built with Visual Studio 2003; + """Python was built with Visual Studio 2003; extensions must be built with a compiler than can generate compatible binaries. Visual Studio 2003 was not found on this system. If you have Cygwin installed, -you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") +you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""" + ) p = r"Software\Microsoft\NET Framework Setup\Product" for base in HKEYS: @@ -144,6 +157,7 @@ you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""") s = s.replace(k, v) return s + def get_build_version(): """Return the version of MSVC that was used to build Python. @@ -169,6 +183,7 @@ def get_build_version(): # else we don't know what version of the compiler this is return None + def get_build_architecture(): """Return the processor architecture. @@ -180,7 +195,8 @@ def get_build_architecture(): if i == -1: return "Intel" j = sys.version.find(")", i) - return sys.version[i+len(prefix):j] + return sys.version[i + len(prefix) : j] + def normalize_and_reduce_paths(paths): """Return a list of normalized paths with duplicates removed. 
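# --- Editor's illustration (not part of the upstream patch) -----------------
# normalize_and_reduce_paths(), whose context closes the hunk above, just
# normalizes each entry and drops duplicates while keeping first-seen order.
# A standalone sketch of the equivalent logic on invented POSIX paths (the
# real helper is only exercised on Windows):
import os

paths = ["/usr/bin", "/usr//bin", "/usr/./bin", "/opt/tools"]
reduced = []
for p in map(os.path.normpath, paths):
    if p not in reduced:
        reduced.append(p)
print(reduced)  # -> ['/usr/bin', '/opt/tools']
# -----------------------------------------------------------------------------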
@@ -197,9 +213,9 @@ def normalize_and_reduce_paths(paths): return reduced_paths -class MSVCCompiler(CCompiler) : +class MSVCCompiler(CCompiler): """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" + as defined by the CCompiler abstract class.""" compiler_type = 'msvc' @@ -218,8 +234,7 @@ class MSVCCompiler(CCompiler) : # Needed for the filename generation methods provided by the # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) + src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions res_extension = '.res' obj_extension = '.obj' static_lib_extension = '.lib' @@ -247,7 +262,11 @@ class MSVCCompiler(CCompiler) : def initialize(self): self.__paths = [] - if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): + if ( + "DISTUTILS_USE_SDK" in os.environ + and "MSSdk" in os.environ + and self.find_exe("cl.exe") + ): # Assume that the SDK set up everything alright; don't try to be # smarter self.cc = "cl.exe" @@ -259,16 +278,17 @@ class MSVCCompiler(CCompiler) : self.__paths = self.get_msvc_paths("path") if len(self.__paths) == 0: - raise DistutilsPlatformError("Python was built with %s, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." - % self.__product) + raise DistutilsPlatformError( + "Python was built with %s, " + "and extensions need to be built with the same " + "version of the compiler, but it isn't installed." % self.__product + ) self.cc = self.find_exe("cl.exe") self.linker = self.find_exe("link.exe") self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler + self.rc = self.find_exe("rc.exe") # resource compiler + self.mc = self.find_exe("mc.exe") # message compiler self.set_path_env_var('lib') self.set_path_env_var('include') @@ -283,75 +303,92 @@ class MSVCCompiler(CCompiler) : self.preprocess_options = None if self.__arch == "Intel": - self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GX' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX', - '/Z7', '/D_DEBUG'] + self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GX', '/DNDEBUG'] + self.compile_options_debug = [ + '/nologo', + '/Od', + '/MDd', + '/W3', + '/GX', + '/Z7', + '/D_DEBUG', + ] else: # Win64 - self.compile_options = [ '/nologo', '/O2', '/MD', '/W3', '/GS-' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', - '/Z7', '/D_DEBUG'] + self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG'] + self.compile_options_debug = [ + '/nologo', + '/Od', + '/MDd', + '/W3', + '/GS-', + '/Z7', + '/D_DEBUG', + ] self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] if self.__version >= 7: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' - ] + self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'] else: self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG' - ] - self.ldflags_static = [ '/nologo'] + '/DLL', + '/nologo', + '/INCREMENTAL:no', + '/pdb:None', + '/DEBUG', + ] + self.ldflags_static = ['/nologo'] self.initialized = True # -- Worker methods ------------------------------------------------ - def object_filenames(self, - source_filenames, - strip_dir=0, - output_dir=''): + def 
object_filenames(self, source_filenames, strip_dir=0, output_dir=''): # Copied from ccompiler.py, extended to return .res as 'object'-file # for .rc input file - if output_dir is None: output_dir = '' + if output_dir is None: + output_dir = '' obj_names = [] for src_name in source_filenames: - (base, ext) = os.path.splitext (src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / + (base, ext) = os.path.splitext(src_name) + base = os.path.splitdrive(base)[1] # Chop off the drive + base = base[os.path.isabs(base) :] # If abs, chop off leading / if ext not in self.src_extensions: # Better to raise an exception instead of silently continuing # and later complain about sources and targets having # different lengths - raise CompileError ("Don't know how to compile %s" % src_name) + raise CompileError("Don't know how to compile %s" % src_name) if strip_dir: - base = os.path.basename (base) + base = os.path.basename(base) if ext in self._rc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) + obj_names.append(os.path.join(output_dir, base + self.res_extension)) elif ext in self._mc_extensions: - obj_names.append (os.path.join (output_dir, - base + self.res_extension)) + obj_names.append(os.path.join(output_dir, base + self.res_extension)) else: - obj_names.append (os.path.join (output_dir, - base + self.obj_extension)) + obj_names.append(os.path.join(output_dir, base + self.obj_extension)) return obj_names - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=0, - extra_preargs=None, extra_postargs=None, depends=None): + def compile( + self, + sources, + output_dir=None, + macros=None, + include_dirs=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + depends=None, + ): if not self.initialized: self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) + compile_info = self._setup_compile( + output_dir, macros, include_dirs, sources, depends, extra_postargs + ) macros, objects, extra_postargs, pp_opts, build = compile_info compile_opts = extra_preargs or [] - compile_opts.append ('/c') + compile_opts.append('/c') if debug: compile_opts.extend(self.compile_options_debug) else: @@ -377,8 +414,7 @@ class MSVCCompiler(CCompiler) : input_opt = src output_opt = "/fo" + obj try: - self.spawn([self.rc] + pp_opts + - [output_opt] + [input_opt]) + self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt]) except DistutilsExecError as msg: raise CompileError(msg) continue @@ -398,50 +434,46 @@ class MSVCCompiler(CCompiler) : rc_dir = os.path.dirname(obj) try: # first compile .MC to .RC and .H file - self.spawn([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext (os.path.basename (src)) - rc_file = os.path.join (rc_dir, base + '.rc') + self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src]) + base, _ = os.path.splitext(os.path.basename(src)) + rc_file = os.path.join(rc_dir, base + '.rc') # then compile .RC to .RES file - self.spawn([self.rc] + - ["/fo" + obj] + [rc_file]) + self.spawn([self.rc] + ["/fo" + obj] + [rc_file]) except DistutilsExecError as msg: raise CompileError(msg) continue else: # how to handle this file? 
- raise CompileError("Don't know how to compile %s to %s" - % (src, obj)) + raise CompileError("Don't know how to compile %s to %s" % (src, obj)) output_opt = "/Fo" + obj try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) + self.spawn( + [self.cc] + + compile_opts + + pp_opts + + [input_opt, output_opt] + + extra_postargs + ) except DistutilsExecError as msg: raise CompileError(msg) return objects - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=0, - target_lang=None): + def create_static_lib( + self, objects, output_libname, output_dir=None, debug=0, target_lang=None + ): if not self.initialized: self.initialize() (objects, output_dir) = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) + output_filename = self.library_filename(output_libname, output_dir=output_dir) if self._need_link(objects, output_filename): lib_args = objects + ['/OUT:' + output_filename] if debug: - pass # XXX what goes here? + pass # XXX what goes here? try: self.spawn([self.lib] + lib_args) except DistutilsExecError as msg: @@ -449,36 +481,36 @@ class MSVCCompiler(CCompiler) : else: log.debug("skipping %s (up-to-date)", output_filename) - - def link(self, - target_desc, - objects, - output_filename, - output_dir=None, - libraries=None, - library_dirs=None, - runtime_library_dirs=None, - export_symbols=None, - debug=0, - extra_preargs=None, - extra_postargs=None, - build_temp=None, - target_lang=None): + def link( + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): if not self.initialized: self.initialize() (objects, output_dir) = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) + fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) (libraries, library_dirs, runtime_library_dirs) = fixed_args if runtime_library_dirs: - self.warn ("I don't know what to do with 'runtime_library_dirs': " - + str (runtime_library_dirs)) + self.warn( + "I don't know what to do with 'runtime_library_dirs': " + + str(runtime_library_dirs) + ) - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) + lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) if output_dir is not None: output_filename = os.path.join(output_dir, output_filename) @@ -495,11 +527,12 @@ class MSVCCompiler(CCompiler) : ldflags = self.ldflags_shared export_opts = [] - for sym in (export_symbols or []): + for sym in export_symbols or []: export_opts.append("/EXPORT:" + sym) - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) + ld_args = ( + ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename] + ) # The MSVC linker generates .lib and .exp files, which cannot be # suppressed by any linker switches. The .lib files may even be @@ -508,11 +541,12 @@ class MSVCCompiler(CCompiler) : # builds, they can go into the same directory. 
if export_symbols is not None: (dll_name, dll_ext) = os.path.splitext( - os.path.basename(output_filename)) + os.path.basename(output_filename) + ) implib_file = os.path.join( - os.path.dirname(objects[0]), - self.library_filename(dll_name)) - ld_args.append ('/IMPLIB:' + implib_file) + os.path.dirname(objects[0]), self.library_filename(dll_name) + ) + ld_args.append('/IMPLIB:' + implib_file) if extra_preargs: ld_args[:0] = extra_preargs @@ -528,7 +562,6 @@ class MSVCCompiler(CCompiler) : else: log.debug("skipping %s (up-to-date)", output_filename) - # -- Miscellaneous methods ----------------------------------------- # These are all used by the 'gen_lib_options() function, in # ccompiler.py. @@ -538,12 +571,12 @@ class MSVCCompiler(CCompiler) : def runtime_library_dir_option(self, dir): raise DistutilsPlatformError( - "don't know how to set runtime library search path for MSVC++") + "don't know how to set runtime library search path for MSVC++" + ) def library_option(self, lib): return self.library_filename(lib) - def find_library_file(self, dirs, lib, debug=0): # Prefer a debugging library if found (and requested), but deal # with it if we don't have one. @@ -553,7 +586,7 @@ class MSVCCompiler(CCompiler) : try_names = [lib] for dir in dirs: for name in try_names: - libfile = os.path.join(dir, self.library_filename (name)) + libfile = os.path.join(dir, self.library_filename(name)) if os.path.exists(libfile): return libfile else: @@ -578,7 +611,7 @@ class MSVCCompiler(CCompiler) : # didn't find it; try existing path for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p),exe) + fn = os.path.join(os.path.abspath(p), exe) if os.path.isfile(fn): return fn @@ -595,11 +628,15 @@ class MSVCCompiler(CCompiler) : path = path + " dirs" if self.__version >= 7: - key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories" - % (self.__root, self.__version)) + key = r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories" % ( + self.__root, + self.__version, + ) else: - key = (r"%s\6.0\Build System\Components\Platforms" - r"\Win32 (%s)\Directories" % (self.__root, platform)) + key = ( + r"%s\6.0\Build System\Components\Platforms" + r"\Win32 (%s)\Directories" % (self.__root, platform) + ) for base in HKEYS: d = read_values(base, key) @@ -613,10 +650,12 @@ class MSVCCompiler(CCompiler) : if self.__version == 6: for base in HKEYS: if read_values(base, r"%s\6.0" % self.__root) is not None: - self.warn("It seems you have Visual Studio 6 installed, " + self.warn( + "It seems you have Visual Studio 6 installed, " "but the expected registry settings are not present.\n" "You must at least run the Visual Studio GUI once " - "so that these entries are created.") + "so that these entries are created." 
+ ) break return [] @@ -639,5 +678,6 @@ if get_build_version() >= 8.0: log.debug("Importing new compiler from distutils.msvc9compiler") OldMSVCCompiler = MSVCCompiler from distutils.msvc9compiler import MSVCCompiler + # get_build_architecture not really relevant now we support cross-compile from distutils.msvc9compiler import MacroExpander diff --git a/setuptools/_distutils/py35compat.py b/setuptools/_distutils/py35compat.py deleted file mode 100644 index 79b2e7f..0000000 --- a/setuptools/_distutils/py35compat.py +++ /dev/null @@ -1,19 +0,0 @@ -import sys -import subprocess - - -def __optim_args_from_interpreter_flags(): - """Return a list of command-line arguments reproducing the current - optimization settings in sys.flags.""" - args = [] - value = sys.flags.optimize - if value > 0: - args.append("-" + "O" * value) - return args - - -_optim_args_from_interpreter_flags = getattr( - subprocess, - "_optim_args_from_interpreter_flags", - __optim_args_from_interpreter_flags, -) diff --git a/setuptools/_distutils/py38compat.py b/setuptools/_distutils/py38compat.py index 7dbe8ce..e556b69 100644 --- a/setuptools/_distutils/py38compat.py +++ b/setuptools/_distutils/py38compat.py @@ -1,6 +1,7 @@ def aix_platform(osname, version, release): try: import _aix_support + return _aix_support.aix_platform() except ImportError: pass diff --git a/setuptools/_distutils/py39compat.py b/setuptools/_distutils/py39compat.py index 9de9501..c43e5f1 100644 --- a/setuptools/_distutils/py39compat.py +++ b/setuptools/_distutils/py39compat.py @@ -7,6 +7,7 @@ def add_ext_suffix_39(vars): Ensure vars contains 'EXT_SUFFIX'. pypa/distutils#130 """ import _imp + ext_suffix = _imp.extension_suffixes()[0] vars.update( EXT_SUFFIX=ext_suffix, diff --git a/setuptools/_distutils/spawn.py b/setuptools/_distutils/spawn.py index b2d10e3..acd2014 100644 --- a/setuptools/_distutils/spawn.py +++ b/setuptools/_distutils/spawn.py @@ -48,6 +48,7 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): if sys.platform == 'darwin': from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver + macosx_target_ver = get_macosx_target_ver() if macosx_target_ver: env[MACOSX_VERSION_VAR] = macosx_target_ver @@ -59,14 +60,14 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): except OSError as exc: if not DEBUG: cmd = cmd[0] - raise DistutilsExecError( - "command %r failed: %s" % (cmd, exc.args[-1])) from exc + raise DistutilsExecError("command %r failed: %s" % (cmd, exc.args[-1])) from exc if exitcode: if not DEBUG: cmd = cmd[0] raise DistutilsExecError( - "command %r failed with exit code %s" % (cmd, exitcode)) + "command %r failed with exit code %s" % (cmd, exitcode) + ) def find_executable(executable, path=None): diff --git a/setuptools/_distutils/sysconfig.py b/setuptools/_distutils/sysconfig.py index 7543f79..e41d51e 100644 --- a/setuptools/_distutils/sysconfig.py +++ b/setuptools/_distutils/sysconfig.py @@ -61,6 +61,7 @@ def _is_parent(dir_a, dir_b): if os.name == 'nt': + @pass_none def _fix_pcbuild(d): # In a venv, sys._home will be inside BASE_PREFIX rather than PREFIX. @@ -71,6 +72,7 @@ if os.name == 'nt': if _is_parent(d, os.path.join(prefix, "PCbuild")) ) return next(matched, d) + project_base = _fix_pcbuild(project_base) _sys_home = _fix_pcbuild(_sys_home) @@ -116,36 +118,74 @@ def get_python_inc(plat_specific=0, prefix=None): If 'prefix' is supplied, use it instead of sys.base_prefix or sys.base_exec_prefix -- i.e., ignore 'plat_specific'. 
""" - if prefix is None: - prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX - if os.name == "posix": - if IS_PYPY and sys.version_info < (3, 8): - return os.path.join(prefix, 'include') - if python_build: - # Assume the executable is in the build directory. The - # pyconfig.h file should be in the same directory. Since - # the build directory may not be the source directory, we - # must use "srcdir" from the makefile to find the "Include" - # directory. - if plat_specific: - return _sys_home or project_base - else: - incdir = os.path.join(get_config_var('srcdir'), 'Include') - return os.path.normpath(incdir) - implementation = 'pypy' if IS_PYPY else 'python' - python_dir = implementation + get_python_version() + build_flags - return os.path.join(prefix, "include", python_dir) - elif os.name == "nt": - if python_build: - # Include both the include and PC dir to ensure we can find - # pyconfig.h - return (os.path.join(prefix, "include") + os.path.pathsep + - os.path.join(prefix, "PC")) - return os.path.join(prefix, "include") - else: + default_prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX + resolved_prefix = prefix if prefix is not None else default_prefix + try: + getter = globals()[f'_get_python_inc_{os.name}'] + except KeyError: raise DistutilsPlatformError( "I don't know where Python installs its C header files " - "on platform '%s'" % os.name) + "on platform '%s'" % os.name + ) + return getter(resolved_prefix, prefix, plat_specific) + + +def _get_python_inc_posix(prefix, spec_prefix, plat_specific): + if IS_PYPY and sys.version_info < (3, 8): + return os.path.join(prefix, 'include') + return ( + _get_python_inc_posix_python(plat_specific) + or _get_python_inc_from_config(plat_specific, spec_prefix) + or _get_python_inc_posix_prefix(prefix) + ) + + +def _get_python_inc_posix_python(plat_specific): + """ + Assume the executable is in the build directory. The + pyconfig.h file should be in the same directory. Since + the build directory may not be the source directory, + use "srcdir" from the makefile to find the "Include" + directory. + """ + if not python_build: + return + if plat_specific: + return _sys_home or project_base + incdir = os.path.join(get_config_var('srcdir'), 'Include') + return os.path.normpath(incdir) + + +def _get_python_inc_from_config(plat_specific, spec_prefix): + """ + If no prefix was explicitly specified, provide the include + directory from the config vars. Useful when + cross-compiling, since the config vars may come from + the host + platform Python installation, while the current Python + executable is from the build platform installation. + """ + if not spec_prefix: + return + return get_config_var('CONF' * plat_specific + 'INCLUDEPY') + + +def _get_python_inc_posix_prefix(prefix): + implementation = 'pypy' if IS_PYPY else 'python' + python_dir = implementation + get_python_version() + build_flags + return os.path.join(prefix, "include", python_dir) + + +def _get_python_inc_nt(prefix, spec_prefix, plat_specific): + if python_build: + # Include both the include and PC dir to ensure we can find + # pyconfig.h + return ( + os.path.join(prefix, "include") + + os.path.pathsep + + os.path.join(prefix, "PC") + ) + return os.path.join(prefix, "include") # allow this behavior to be monkey-patched. Ref pypa/distutils#2. 
@@ -196,8 +236,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): # Pure Python libdir = "lib" implementation = 'pypy' if IS_PYPY else 'python' - libpython = os.path.join(prefix, libdir, - implementation + get_python_version()) + libpython = os.path.join(prefix, libdir, implementation + get_python_version()) return _posix_lib(standard_lib, libpython, early_prefix, prefix) elif os.name == "nt": if standard_lib: @@ -207,7 +246,8 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): else: raise DistutilsPlatformError( "I don't know where Python installs its library " - "on platform '%s'" % os.name) + "on platform '%s'" % os.name + ) def customize_compiler(compiler): @@ -230,21 +270,36 @@ def customize_compiler(compiler): # Use get_config_var() to ensure _config_vars is initialized. if not get_config_var('CUSTOMIZED_OSX_COMPILER'): import _osx_support + _osx_support.customize_compiler(_config_vars) _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' - (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ - get_config_vars( - 'CC', 'CXX', 'CFLAGS', - 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') + ( + cc, + cxx, + cflags, + ccshared, + ldshared, + shlib_suffix, + ar, + ar_flags, + ) = get_config_vars( + 'CC', + 'CXX', + 'CFLAGS', + 'CCSHARED', + 'LDSHARED', + 'SHLIB_SUFFIX', + 'AR', + 'ARFLAGS', + ) if 'CC' in os.environ: newcc = os.environ['CC'] - if('LDSHARED' not in os.environ - and ldshared.startswith(cc)): + if 'LDSHARED' not in os.environ and ldshared.startswith(cc): # If CC is overridden, use that as the default # command for LDSHARED as well - ldshared = newcc + ldshared[len(cc):] + ldshared = newcc + ldshared[len(cc) :] cc = newcc if 'CXX' in os.environ: cxx = os.environ['CXX'] @@ -253,7 +308,7 @@ def customize_compiler(compiler): if 'CPP' in os.environ: cpp = os.environ['CPP'] else: - cpp = cc + " -E" # not always + cpp = cc + " -E" # not always if 'LDFLAGS' in os.environ: ldshared = ldshared + ' ' + os.environ['LDFLAGS'] if 'CFLAGS' in os.environ: @@ -278,7 +333,8 @@ def customize_compiler(compiler): compiler_cxx=cxx, linker_so=ldshared, linker_exe=cc, - archiver=archiver) + archiver=archiver, + ) if 'RANLIB' in os.environ and compiler.executables.get('ranlib', None): compiler.set_executables(ranlib=os.environ['RANLIB']) @@ -328,9 +384,10 @@ def parse_makefile(fn, g=None): used instead of a new dictionary. 
""" from distutils.text_file import TextFile + fp = TextFile( - fn, strip_comments=1, skip_blanks=1, join_lines=1, - errors="surrogateescape") + fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape" + ) if g is None: g = {} @@ -383,8 +440,7 @@ def parse_makefile(fn, g=None): item = os.environ[n] elif n in renamed_variables: - if name.startswith('PY_') and \ - name[3:] in renamed_variables: + if name.startswith('PY_') and name[3:] in renamed_variables: item = "" elif 'PY_' + n in notdone: @@ -395,8 +451,8 @@ def parse_makefile(fn, g=None): else: done[n] = item = "" if found: - after = value[m.end():] - value = value[:m.start()] + item + after + after = value[m.end() :] + value = value[: m.start()] + item + after if "$" in after: notdone[name] = value else: @@ -408,8 +464,7 @@ def parse_makefile(fn, g=None): done[name] = value del notdone[name] - if name.startswith('PY_') \ - and name[3:] in renamed_variables: + if name.startswith('PY_') and name[3:] in renamed_variables: name = name[3:] if name not in done: @@ -489,6 +544,6 @@ def get_config_var(name): """ if name == 'SO': import warnings - warnings.warn( - 'SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2) + + warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2) return get_config_vars().get(name) diff --git a/setuptools/_distutils/tests/py35compat.py b/setuptools/_distutils/tests/py35compat.py deleted file mode 100644 index 0c75526..0000000 --- a/setuptools/_distutils/tests/py35compat.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Backward compatibility support for Python 3.5 -""" - -import sys -import test.support -import subprocess - - -# copied from Python 3.9 test.support module -def _missing_compiler_executable(cmd_names=[]): - """Check if the compiler components used to build the interpreter exist. - - Check for the existence of the compiler executables whose names are listed - in 'cmd_names' or all the compiler executables when 'cmd_names' is empty - and return the first missing executable or None when none is found - missing. 
- - """ - from distutils import ccompiler, sysconfig, spawn - compiler = ccompiler.new_compiler() - sysconfig.customize_compiler(compiler) - for name in compiler.executables: - if cmd_names and name not in cmd_names: - continue - cmd = getattr(compiler, name) - if cmd_names: - assert cmd is not None, \ - "the '%s' executable is not configured" % name - elif not cmd: - continue - if spawn.find_executable(cmd[0]) is None: - return cmd[0] - - -missing_compiler_executable = vars(test.support).setdefault( - 'missing_compiler_executable', - _missing_compiler_executable, -) - - -try: - from test.support import unix_shell -except ImportError: - # Adapted from Python 3.9 test.support module - is_android = hasattr(sys, 'getandroidapilevel') - unix_shell = ( - None if sys.platform == 'win32' else - '/system/bin/sh' if is_android else - '/bin/sh' - ) - - -# copied from Python 3.9 subprocess module -def _optim_args_from_interpreter_flags(): - """Return a list of command-line arguments reproducing the current - optimization settings in sys.flags.""" - args = [] - value = sys.flags.optimize - if value > 0: - args.append('-' + 'O' * value) - return args - - -vars(subprocess).setdefault( - '_optim_args_from_interpreter_flags', - _optim_args_from_interpreter_flags, -) - - -def adapt_glob(regex): - """ - Supply legacy expectation on Python 3.5 - """ - if sys.version_info > (3, 6): - return regex - return regex.replace('(?s:', '').replace(r')\Z', r'\Z(?ms)') diff --git a/setuptools/_distutils/tests/support.py b/setuptools/_distutils/tests/support.py index b4410fc..3085468 100644 --- a/setuptools/_distutils/tests/support.py +++ b/setuptools/_distutils/tests/support.py @@ -15,7 +15,6 @@ from distutils.core import Distribution class LoggingSilencer(object): - def setUp(self): super().setUp() self.threshold = log.set_threshold(log.FATAL) @@ -35,13 +34,11 @@ class LoggingSilencer(object): if level not in (DEBUG, INFO, WARN, ERROR, FATAL): raise ValueError('%s wrong log level' % str(level)) if not isinstance(msg, str): - raise TypeError("msg should be str, not '%.200s'" - % (type(msg).__name__)) + raise TypeError("msg should be str, not '%.200s'" % (type(msg).__name__)) self.logs.append((level, msg, args)) def get_logs(self, *levels): - return [msg % args for level, msg, args - in self.logs if level in levels] + return [msg % args for level, msg, args in self.logs if level in levels] def clear_logs(self): self.logs = [] @@ -120,7 +117,6 @@ class DummyCommand: class EnvironGuard(object): - def setUp(self): super(EnvironGuard, self).setUp() self.old_environ = deepcopy(os.environ) @@ -151,8 +147,9 @@ def copy_xxmodule_c(directory): """ filename = _get_xxmodule_path() if filename is None: - raise unittest.SkipTest('cannot find xxmodule.c (test must run in ' - 'the python build dir)') + raise unittest.SkipTest( + 'cannot find xxmodule.c (test must run in ' 'the python build dir)' + ) shutil.copy(filename, directory) diff --git a/setuptools/_distutils/tests/test_archive_util.py b/setuptools/_distutils/tests/test_archive_util.py index 800b901..8fb9574 100644 --- a/setuptools/_distutils/tests/test_archive_util.py +++ b/setuptools/_distutils/tests/test_archive_util.py @@ -8,9 +8,13 @@ from os.path import splitdrive import warnings from distutils import archive_util -from distutils.archive_util import (check_archive_formats, make_tarball, - make_zipfile, make_archive, - ARCHIVE_FORMATS) +from distutils.archive_util import ( + check_archive_formats, + make_tarball, + make_zipfile, + make_archive, + ARCHIVE_FORMATS, +) from 
distutils.spawn import find_executable, spawn from distutils.tests import support from test.support import run_unittest, patch @@ -22,12 +26,14 @@ from .py38compat import check_warnings try: import zipfile + ZIP_SUPPORT = True except ImportError: ZIP_SUPPORT = find_executable('zip') try: import zlib + ZLIB_SUPPORT = True except ImportError: ZLIB_SUPPORT = False @@ -42,6 +48,7 @@ try: except ImportError: lzma = None + def can_fs_encode(filename): """ Return True if the filename can be saved in the file system. @@ -55,10 +62,9 @@ def can_fs_encode(filename): return True -class ArchiveUtilTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - +class ArchiveUtilTestCase( + support.TempdirManager, support.LoggingSilencer, unittest.TestCase +): @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') def test_make_tarball(self, name='archive'): # creating something to tar @@ -82,27 +88,31 @@ class ArchiveUtilTestCase(support.TempdirManager, tmpdir = self._create_files() self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz') - @unittest.skipUnless(can_fs_encode('årchiv'), - 'File system cannot handle this filename') + @unittest.skipUnless( + can_fs_encode('årchiv'), 'File system cannot handle this filename' + ) def test_make_tarball_latin1(self): """ Mirror test_make_tarball, except filename contains latin characters. """ - self.test_make_tarball('årchiv') # note this isn't a real word + self.test_make_tarball('årchiv') # note this isn't a real word - @unittest.skipUnless(can_fs_encode('のアーカイブ'), - 'File system cannot handle this filename') + @unittest.skipUnless( + can_fs_encode('のアーカイブ'), 'File system cannot handle this filename' + ) def test_make_tarball_extended(self): """ Mirror test_make_tarball, except filename contains extended characters outside the latin charset. 
""" - self.test_make_tarball('のアーカイブ') # japanese for archive + self.test_make_tarball('のアーカイブ') # japanese for archive def _make_tarball(self, tmpdir, target_name, suffix, **kwargs): tmpdir2 = self.mkdtemp() - unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0], - "source and target should be on same drive") + unittest.skipUnless( + splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0], + "source and target should be on same drive", + ) base_name = os.path.join(tmpdir2, target_name) @@ -124,8 +134,14 @@ class ArchiveUtilTestCase(support.TempdirManager, finally: tar.close() - _zip_created_files = ['dist/', 'dist/file1', 'dist/file2', - 'dist/sub/', 'dist/sub/file3', 'dist/sub2/'] + _zip_created_files = [ + 'dist/', + 'dist/file1', + 'dist/file2', + 'dist/sub/', + 'dist/sub/file3', + 'dist/sub2/', + ] _created_files = [p.rstrip('/') for p in _zip_created_files] def _create_files(self): @@ -140,11 +156,12 @@ class ArchiveUtilTestCase(support.TempdirManager, os.mkdir(os.path.join(dist, 'sub2')) return tmpdir - @unittest.skipUnless(find_executable('tar') and find_executable('gzip') - and ZLIB_SUPPORT, - 'Need the tar, gzip and zlib command to run') + @unittest.skipUnless( + find_executable('tar') and find_executable('gzip') and ZLIB_SUPPORT, + 'Need the tar, gzip and zlib command to run', + ) def test_tarfile_vs_tar(self): - tmpdir = self._create_files() + tmpdir = self._create_files() tmpdir2 = self.mkdtemp() base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() @@ -197,10 +214,11 @@ class ArchiveUtilTestCase(support.TempdirManager, tarball = base_name + '.tar' self.assertTrue(os.path.exists(tarball)) - @unittest.skipUnless(find_executable('compress'), - 'The compress program is required') + @unittest.skipUnless( + find_executable('compress'), 'The compress program is required' + ) def test_compress_deprecated(self): - tmpdir = self._create_files() + tmpdir = self._create_files() base_name = os.path.join(self.mkdtemp(), 'archive') # using compress and testing the PendingDeprecationWarning @@ -223,15 +241,15 @@ class ArchiveUtilTestCase(support.TempdirManager, try: with check_warnings() as w: warnings.simplefilter("always") - make_tarball(base_name, 'dist', compress='compress', - dry_run=True) + make_tarball(base_name, 'dist', compress='compress', dry_run=True) finally: os.chdir(old_dir) self.assertFalse(os.path.exists(tarball)) self.assertEqual(len(w.warnings), 1) - @unittest.skipUnless(ZIP_SUPPORT and ZLIB_SUPPORT, - 'Need zip and zlib support to run') + @unittest.skipUnless( + ZIP_SUPPORT and ZLIB_SUPPORT, 'Need zip and zlib support to run' + ) def test_make_zipfile(self): # creating something to tar tmpdir = self._create_files() @@ -251,6 +269,7 @@ class ArchiveUtilTestCase(support.TempdirManager, called = [] zipfile_class = zipfile.ZipFile + def fake_zipfile(*a, **kw): if kw.get('compression', None) == zipfile.ZIP_STORED: called.append((a, kw)) @@ -265,17 +284,18 @@ class ArchiveUtilTestCase(support.TempdirManager, make_zipfile(base_name, 'dist') tarball = base_name + '.zip' - self.assertEqual(called, - [((tarball, "w"), {'compression': zipfile.ZIP_STORED})]) + self.assertEqual( + called, [((tarball, "w"), {'compression': zipfile.ZIP_STORED})] + ) self.assertTrue(os.path.exists(tarball)) with zipfile.ZipFile(tarball) as zf: self.assertEqual(sorted(zf.namelist()), self._zip_created_files) def test_check_archive_formats(self): - self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']), - 'xxx') - self.assertIsNone(check_archive_formats(['gztar', 'bztar', 
'xztar', - 'ztar', 'tar', 'zip'])) + self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']), 'xxx') + self.assertIsNone( + check_archive_formats(['gztar', 'bztar', 'xztar', 'ztar', 'tar', 'zip']) + ) def test_make_archive(self): tmpdir = self.mkdtemp() @@ -284,8 +304,10 @@ class ArchiveUtilTestCase(support.TempdirManager, def test_make_archive_cwd(self): current_dir = os.getcwd() + def _breaks(*args, **kw): raise RuntimeError() + ARCHIVE_FORMATS['xxx'] = (_breaks, [], 'xxx file') try: try: @@ -297,8 +319,8 @@ class ArchiveUtilTestCase(support.TempdirManager, del ARCHIVE_FORMATS['xxx'] def test_make_archive_tar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') res = make_archive(base_name, 'tar', base_dir, 'dist') self.assertTrue(os.path.exists(res)) self.assertEqual(os.path.basename(res), 'archive.tar') @@ -306,8 +328,8 @@ class ArchiveUtilTestCase(support.TempdirManager, @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') def test_make_archive_gztar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') res = make_archive(base_name, 'gztar', base_dir, 'dist') self.assertTrue(os.path.exists(res)) self.assertEqual(os.path.basename(res), 'archive.tar.gz') @@ -315,8 +337,8 @@ class ArchiveUtilTestCase(support.TempdirManager, @unittest.skipUnless(bz2, 'Need bz2 support to run') def test_make_archive_bztar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') res = make_archive(base_name, 'bztar', base_dir, 'dist') self.assertTrue(os.path.exists(res)) self.assertEqual(os.path.basename(res), 'archive.tar.bz2') @@ -324,8 +346,8 @@ class ArchiveUtilTestCase(support.TempdirManager, @unittest.skipUnless(lzma, 'Need xz support to run') def test_make_archive_xztar(self): - base_dir = self._create_files() - base_name = os.path.join(self.mkdtemp() , 'archive') + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') res = make_archive(base_name, 'xztar', base_dir, 'dist') self.assertTrue(os.path.exists(res)) self.assertEqual(os.path.basename(res), 'archive.tar.xz') @@ -340,37 +362,41 @@ class ArchiveUtilTestCase(support.TempdirManager, else: group = owner = 'root' - base_dir = self._create_files() + base_dir = self._create_files() root_dir = self.mkdtemp() - base_name = os.path.join(self.mkdtemp() , 'archive') - res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner, - group=group) + base_name = os.path.join(self.mkdtemp(), 'archive') + res = make_archive( + base_name, 'zip', root_dir, base_dir, owner=owner, group=group + ) self.assertTrue(os.path.exists(res)) res = make_archive(base_name, 'zip', root_dir, base_dir) self.assertTrue(os.path.exists(res)) - res = make_archive(base_name, 'tar', root_dir, base_dir, - owner=owner, group=group) + res = make_archive( + base_name, 'tar', root_dir, base_dir, owner=owner, group=group + ) self.assertTrue(os.path.exists(res)) - res = make_archive(base_name, 'tar', root_dir, base_dir, - owner='kjhkjhkjg', group='oihohoh') + res = make_archive( + base_name, 'tar', root_dir, base_dir, owner='kjhkjhkjg', group='oihohoh' + ) self.assertTrue(os.path.exists(res)) @unittest.skipUnless(ZLIB_SUPPORT, "Requires zlib") 
@require_unix_id @require_uid_0 def test_tarfile_root_owner(self): - tmpdir = self._create_files() + tmpdir = self._create_files() base_name = os.path.join(self.mkdtemp(), 'archive') old_dir = os.getcwd() os.chdir(tmpdir) group = grp.getgrgid(0)[0] owner = pwd.getpwuid(0)[0] try: - archive_name = make_tarball(base_name, 'dist', compress=None, - owner=owner, group=group) + archive_name = make_tarball( + base_name, 'dist', compress=None, owner=owner, group=group + ) finally: os.chdir(old_dir) @@ -386,8 +412,10 @@ class ArchiveUtilTestCase(support.TempdirManager, finally: archive.close() + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(ArchiveUtilTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_bdist.py b/setuptools/_distutils/tests/test_bdist.py index 8b7498e..2d0bb95 100644 --- a/setuptools/_distutils/tests/test_bdist.py +++ b/setuptools/_distutils/tests/test_bdist.py @@ -8,9 +8,7 @@ from distutils.command.bdist import bdist from distutils.tests import support -class BuildTestCase(support.TempdirManager, - unittest.TestCase): - +class BuildTestCase(support.TempdirManager, unittest.TestCase): def test_formats(self): # let's create a command and make sure # we can set the format @@ -21,8 +19,17 @@ class BuildTestCase(support.TempdirManager, self.assertEqual(cmd.formats, ['msi']) # what formats does bdist offer? - formats = ['bztar', 'gztar', 'msi', 'rpm', 'tar', - 'wininst', 'xztar', 'zip', 'ztar'] + formats = [ + 'bztar', + 'gztar', + 'msi', + 'rpm', + 'tar', + 'wininst', + 'xztar', + 'zip', + 'ztar', + ] found = sorted(cmd.format_command) self.assertEqual(found, formats) @@ -34,24 +41,30 @@ class BuildTestCase(support.TempdirManager, cmd.ensure_finalized() dist.command_obj['bdist'] = cmd - names = ['bdist_dumb', 'bdist_wininst'] # bdist_rpm does not support --skip-build + names = [ + 'bdist_dumb', + 'bdist_wininst', + ] # bdist_rpm does not support --skip-build if os.name == 'nt': names.append('bdist_msi') for name in names: with warnings.catch_warnings(): - warnings.filterwarnings('ignore', 'bdist_wininst command is deprecated', - DeprecationWarning) + warnings.filterwarnings( + 'ignore', 'bdist_wininst command is deprecated', DeprecationWarning + ) subcmd = cmd.get_finalized_command(name) if getattr(subcmd, '_unsupported', False): # command is not supported on this build continue - self.assertTrue(subcmd.skip_build, - '%s should take --skip-build from bdist' % name) + self.assertTrue( + subcmd.skip_build, '%s should take --skip-build from bdist' % name + ) def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase) + if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_bdist_dumb.py b/setuptools/_distutils/tests/test_bdist_dumb.py index bb860c8..83ab217 100644 --- a/setuptools/_distutils/tests/test_bdist_dumb.py +++ b/setuptools/_distutils/tests/test_bdist_dumb.py @@ -21,16 +21,18 @@ setup(name='foo', version='0.1', py_modules=['foo'], try: import zlib + ZLIB_SUPPORT = True except ImportError: ZLIB_SUPPORT = False -class BuildDumbTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - +class BuildDumbTestCase( + support.TempdirManager, + support.LoggingSilencer, + support.EnvironGuard, + unittest.TestCase, +): def setUp(self): super(BuildDumbTestCase, self).setUp() self.old_location = os.getcwd() @@ -54,10 +56,16 @@ class BuildDumbTestCase(support.TempdirManager, 
self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') self.write_file((pkg_dir, 'README'), '') - dist = Distribution({'name': 'foo', 'version': '0.1', - 'py_modules': ['foo'], - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'}) + dist = Distribution( + { + 'name': 'foo', + 'version': '0.1', + 'py_modules': ['foo'], + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + } + ) dist.script_name = 'setup.py' os.chdir(pkg_dir) @@ -90,8 +98,10 @@ class BuildDumbTestCase(support.TempdirManager, wanted.append('foo.%s.pyc' % sys.implementation.cache_tag) self.assertEqual(contents, sorted(wanted)) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(BuildDumbTestCase) + if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_bdist_msi.py b/setuptools/_distutils/tests/test_bdist_msi.py index b1831ef..a9f3dbb 100644 --- a/setuptools/_distutils/tests/test_bdist_msi.py +++ b/setuptools/_distutils/tests/test_bdist_msi.py @@ -8,13 +8,13 @@ from .py38compat import check_warnings @unittest.skipUnless(sys.platform == 'win32', 'these tests require Windows') -class BDistMSITestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - +class BDistMSITestCase( + support.TempdirManager, support.LoggingSilencer, unittest.TestCase +): def test_minimal(self): # minimal test XXX need more tests from distutils.command.bdist_msi import bdist_msi + project_dir, dist = self.create_dist() with check_warnings(("", DeprecationWarning)): cmd = bdist_msi(dist) @@ -24,5 +24,6 @@ class BDistMSITestCase(support.TempdirManager, def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(BDistMSITestCase) + if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_bdist_rpm.py b/setuptools/_distutils/tests/test_bdist_rpm.py index 08a7cb4..f60a582 100644 --- a/setuptools/_distutils/tests/test_bdist_rpm.py +++ b/setuptools/_distutils/tests/test_bdist_rpm.py @@ -22,11 +22,13 @@ setup(name='foo', version='0.1', py_modules=['foo'], """ -class BuildRpmTestCase(support.TempdirManager, - support.EnvironGuard, - support.LoggingSilencer, - unittest.TestCase): +class BuildRpmTestCase( + support.TempdirManager, + support.EnvironGuard, + support.LoggingSilencer, + unittest.TestCase, +): def setUp(self): try: sys.executable.encode("UTF-8") @@ -45,17 +47,18 @@ class BuildRpmTestCase(support.TempdirManager, # XXX I am unable yet to make this test work without # spurious sdtout/stderr output under Mac OS X - @unittest.skipUnless(sys.platform.startswith('linux'), - 'spurious sdtout/stderr output under Mac OS X') + @unittest.skipUnless( + sys.platform.startswith('linux'), 'spurious sdtout/stderr output under Mac OS X' + ) @requires_zlib() - @unittest.skipIf(find_executable('rpm') is None, - 'the rpm command is not found') - @unittest.skipIf(find_executable('rpmbuild') is None, - 'the rpmbuild command is not found') + @unittest.skipIf(find_executable('rpm') is None, 'the rpm command is not found') + @unittest.skipIf( + find_executable('rpmbuild') is None, 'the rpmbuild command is not found' + ) def test_quiet(self): # let's create a package tmp_dir = self.mkdtemp() - os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation + os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation pkg_dir = os.path.join(tmp_dir, 'foo') os.mkdir(pkg_dir) self.write_file((pkg_dir, 'setup.py'), SETUP_PY) @@ -63,10 +66,16 @@ class BuildRpmTestCase(support.TempdirManager, self.write_file((pkg_dir, 
'MANIFEST.in'), 'include foo.py') self.write_file((pkg_dir, 'README'), '') - dist = Distribution({'name': 'foo', 'version': '0.1', - 'py_modules': ['foo'], - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'}) + dist = Distribution( + { + 'name': 'foo', + 'version': '0.1', + 'py_modules': ['foo'], + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + } + ) dist.script_name = 'setup.py' os.chdir(pkg_dir) @@ -84,22 +93,25 @@ class BuildRpmTestCase(support.TempdirManager, # bug #2945: upload ignores bdist_rpm files self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files) - self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files) + self.assertIn( + ('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files + ) # XXX I am unable yet to make this test work without # spurious sdtout/stderr output under Mac OS X - @unittest.skipUnless(sys.platform.startswith('linux'), - 'spurious sdtout/stderr output under Mac OS X') + @unittest.skipUnless( + sys.platform.startswith('linux'), 'spurious sdtout/stderr output under Mac OS X' + ) @requires_zlib() # http://bugs.python.org/issue1533164 - @unittest.skipIf(find_executable('rpm') is None, - 'the rpm command is not found') - @unittest.skipIf(find_executable('rpmbuild') is None, - 'the rpmbuild command is not found') + @unittest.skipIf(find_executable('rpm') is None, 'the rpm command is not found') + @unittest.skipIf( + find_executable('rpmbuild') is None, 'the rpmbuild command is not found' + ) def test_no_optimize_flag(self): # let's create a package that breaks bdist_rpm tmp_dir = self.mkdtemp() - os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation + os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation pkg_dir = os.path.join(tmp_dir, 'foo') os.mkdir(pkg_dir) self.write_file((pkg_dir, 'setup.py'), SETUP_PY) @@ -107,10 +119,16 @@ class BuildRpmTestCase(support.TempdirManager, self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') self.write_file((pkg_dir, 'README'), '') - dist = Distribution({'name': 'foo', 'version': '0.1', - 'py_modules': ['foo'], - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'}) + dist = Distribution( + { + 'name': 'foo', + 'version': '0.1', + 'py_modules': ['foo'], + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + } + ) dist.script_name = 'setup.py' os.chdir(pkg_dir) @@ -127,12 +145,16 @@ class BuildRpmTestCase(support.TempdirManager, # bug #2945: upload ignores bdist_rpm files self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files) - self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files) + self.assertIn( + ('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files + ) os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm')) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(BuildRpmTestCase) + if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_bdist_wininst.py b/setuptools/_distutils/tests/test_bdist_wininst.py index 59f2516..c103a63 100644 --- a/setuptools/_distutils/tests/test_bdist_wininst.py +++ b/setuptools/_distutils/tests/test_bdist_wininst.py @@ -9,14 +9,18 @@ from .py38compat import check_warnings from distutils.command.bdist_wininst import bdist_wininst from distutils.tests import support -@unittest.skipIf(sys.platform == 'win32' and platform.machine() == 'ARM64', - 'bdist_wininst is not supported in this install') -@unittest.skipIf(getattr(bdist_wininst, '_unsupported', False), 
- 'bdist_wininst is not supported in this install') -class BuildWinInstTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): +@unittest.skipIf( + sys.platform == 'win32' and platform.machine() == 'ARM64', + 'bdist_wininst is not supported in this install', +) +@unittest.skipIf( + getattr(bdist_wininst, '_unsupported', False), + 'bdist_wininst is not supported in this install', +) +class BuildWinInstTestCase( + support.TempdirManager, support.LoggingSilencer, unittest.TestCase +): def test_get_exe_bytes(self): # issue5731: command was broken on non-windows platforms @@ -33,8 +37,10 @@ class BuildWinInstTestCase(support.TempdirManager, exe_file = cmd.get_exe_bytes() self.assertGreater(len(exe_file), 10) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(BuildWinInstTestCase) + if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_build.py b/setuptools/_distutils/tests/test_build.py index 9372441..190bbdf 100644 --- a/setuptools/_distutils/tests/test_build.py +++ b/setuptools/_distutils/tests/test_build.py @@ -8,10 +8,8 @@ from distutils.command.build import build from distutils.tests import support from sysconfig import get_platform -class BuildTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): +class BuildTestCase(support.TempdirManager, support.LoggingSilencer, unittest.TestCase): def test_finalize_options(self): pkg_dir, dist = self.create_dist() cmd = build(dist) @@ -42,15 +40,16 @@ class BuildTestCase(support.TempdirManager, self.assertEqual(cmd.build_temp, wanted) # build_scripts is build/scripts-x.x - wanted = os.path.join(cmd.build_base, - 'scripts-%d.%d' % sys.version_info[:2]) + wanted = os.path.join(cmd.build_base, 'scripts-%d.%d' % sys.version_info[:2]) self.assertEqual(cmd.build_scripts, wanted) # executable is os.path.normpath(sys.executable) self.assertEqual(cmd.executable, os.path.normpath(sys.executable)) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(BuildTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_build_clib.py b/setuptools/_distutils/tests/test_build_clib.py index d50ead7..24c7478 100644 --- a/setuptools/_distutils/tests/test_build_clib.py +++ b/setuptools/_distutils/tests/test_build_clib.py @@ -3,18 +3,16 @@ import unittest import os import sys -from test.support import run_unittest - -from .py35compat import missing_compiler_executable +from test.support import run_unittest, missing_compiler_executable from distutils.command.build_clib import build_clib from distutils.errors import DistutilsSetupError from distutils.tests import support -class BuildCLibTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): +class BuildCLibTestCase( + support.TempdirManager, support.LoggingSilencer, unittest.TestCase +): def test_check_library_dist(self): pkg_dir, dist = self.create_dist() cmd = build_clib(dist) @@ -23,23 +21,27 @@ class BuildCLibTestCase(support.TempdirManager, self.assertRaises(DistutilsSetupError, cmd.check_library_list, 'foo') # each element of 'libraries' must a 2-tuple - self.assertRaises(DistutilsSetupError, cmd.check_library_list, - ['foo1', 'foo2']) + self.assertRaises(DistutilsSetupError, cmd.check_library_list, ['foo1', 'foo2']) # first element of each tuple in 'libraries' # must be a string (the library name) - self.assertRaises(DistutilsSetupError, cmd.check_library_list, - [(1, 'foo1'), ('name', 'foo2')]) + 
self.assertRaises( + DistutilsSetupError, cmd.check_library_list, [(1, 'foo1'), ('name', 'foo2')] + ) # library name may not contain directory separators - self.assertRaises(DistutilsSetupError, cmd.check_library_list, - [('name', 'foo1'), - ('another/name', 'foo2')]) + self.assertRaises( + DistutilsSetupError, + cmd.check_library_list, + [('name', 'foo1'), ('another/name', 'foo2')], + ) # second element of each tuple must be a dictionary (build info) - self.assertRaises(DistutilsSetupError, cmd.check_library_list, - [('name', {}), - ('another', 'foo2')]) + self.assertRaises( + DistutilsSetupError, + cmd.check_library_list, + [('name', {}), ('another', 'foo2')], + ) # those work libs = [('name', {}), ('name', {'ok': 'good'})] @@ -63,17 +65,21 @@ class BuildCLibTestCase(support.TempdirManager, cmd.libraries = [('name', {'sources': ('a', 'b')})] self.assertEqual(cmd.get_source_files(), ['a', 'b']) - cmd.libraries = [('name', {'sources': ('a', 'b')}), - ('name2', {'sources': ['c', 'd']})] + cmd.libraries = [ + ('name', {'sources': ('a', 'b')}), + ('name2', {'sources': ['c', 'd']}), + ] self.assertEqual(cmd.get_source_files(), ['a', 'b', 'c', 'd']) def test_build_libraries(self): pkg_dir, dist = self.create_dist() cmd = build_clib(dist) + class FakeCompiler: def compile(*args, **kw): pass + create_static_lib = compile cmd.compiler = FakeCompiler() @@ -129,8 +135,10 @@ class BuildCLibTestCase(support.TempdirManager, # let's check the result self.assertIn('libfoo.a', os.listdir(build_temp)) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(BuildCLibTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_build_ext.py b/setuptools/_distutils/tests/test_build_ext.py index 920e4dc..c42ceab 100644 --- a/setuptools/_distutils/tests/test_build_ext.py +++ b/setuptools/_distutils/tests/test_build_ext.py @@ -6,12 +6,19 @@ import textwrap from distutils.core import Distribution from distutils.command.build_ext import build_ext from distutils import sysconfig -from distutils.tests.support import (TempdirManager, LoggingSilencer, - copy_xxmodule_c, fixup_build_ext) +from distutils.tests.support import ( + TempdirManager, + LoggingSilencer, + copy_xxmodule_c, + fixup_build_ext, +) from distutils.extension import Extension from distutils.errors import ( - CompileError, DistutilsPlatformError, DistutilsSetupError, - UnknownFileError) + CompileError, + DistutilsPlatformError, + DistutilsSetupError, + UnknownFileError, +) import unittest from test import support @@ -23,17 +30,17 @@ from test.support.script_helper import assert_python_ok ALREADY_TESTED = False -class BuildExtTestCase(TempdirManager, - LoggingSilencer, - unittest.TestCase): +class BuildExtTestCase(TempdirManager, LoggingSilencer, unittest.TestCase): def setUp(self): # Create a simple test environment super(BuildExtTestCase, self).setUp() self.tmp_dir = self.mkdtemp() import site + self.old_user_base = site.USER_BASE site.USER_BASE = self.mkdtemp() from distutils.command import build_ext + build_ext.USER_BASE = site.USER_BASE # bpo-30132: On Windows, a .pdb file may be created in the current @@ -45,8 +52,10 @@ class BuildExtTestCase(TempdirManager, def tearDown(self): import site + site.USER_BASE = self.old_user_base from distutils.command import build_ext + build_ext.USER_BASE = self.old_user_base super(BuildExtTestCase, self).tearDown() @@ -83,7 +92,8 @@ class BuildExtTestCase(TempdirManager, else: ALREADY_TESTED = type(self).__name__ - code = textwrap.dedent(""" + code = 
textwrap.dedent( + """ tmp_dir = {self.tmp_dir!r} import sys @@ -109,7 +119,10 @@ class BuildExtTestCase(TempdirManager, unittest.main() - """.format(**locals())) + """.format( + **locals() + ) + ) assert_python_ok('-c', code) def test_solaris_enable_shared(self): @@ -117,8 +130,9 @@ class BuildExtTestCase(TempdirManager, cmd = self.build_ext(dist) old = sys.platform - sys.platform = 'sunos' # fooling finalize_options - from distutils.sysconfig import _config_vars + sys.platform = 'sunos' # fooling finalize_options + from distutils.sysconfig import _config_vars + old_var = _config_vars.get('Py_ENABLE_SHARED') _config_vars['Py_ENABLE_SHARED'] = 1 try: @@ -135,12 +149,12 @@ class BuildExtTestCase(TempdirManager, def test_user_site(self): import site + dist = Distribution({'name': 'xx'}) cmd = self.build_ext(dist) # making sure the user option is there - options = [name for name, short, lable in - cmd.user_options] + options = [name for name, short, lable in cmd.user_options] self.assertIn('user', options) # setting a value @@ -169,8 +183,9 @@ class BuildExtTestCase(TempdirManager, dist = Distribution({'name': 'xx', 'ext_modules': modules}) cmd = self.build_ext(dist) cmd.ensure_finalized() - self.assertRaises((UnknownFileError, CompileError), - cmd.run) # should raise an error + self.assertRaises( + (UnknownFileError, CompileError), cmd.run + ) # should raise an error modules = [Extension('foo', ['xxx'], optional=True)] dist = Distribution({'name': 'xx', 'ext_modules': modules}) @@ -256,8 +271,7 @@ class BuildExtTestCase(TempdirManager, cmd.finalize_options() #'extensions' option must be a list of Extension instances - self.assertRaises(DistutilsSetupError, - cmd.check_extensions_list, 'foo') + self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, 'foo') # each element of 'ext_modules' option must be an # Extension instance or 2-tuple @@ -276,8 +290,7 @@ class BuildExtTestCase(TempdirManager, self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) # ok this one should pass - exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', - 'some': 'bar'})] + exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', 'some': 'bar'})] cmd.check_extensions_list(exts) ext = exts[0] self.assertIsInstance(ext, Extension) @@ -289,8 +302,17 @@ class BuildExtTestCase(TempdirManager, self.assertFalse(hasattr(ext, 'some')) # 'macros' element of build info dict must be 1- or 2-tuple - exts = [('foo.bar', {'sources': [''], 'libraries': 'foo', - 'some': 'bar', 'macros': [('1', '2', '3'), 'foo']})] + exts = [ + ( + 'foo.bar', + { + 'sources': [''], + 'libraries': 'foo', + 'some': 'bar', + 'macros': [('1', '2', '3'), 'foo'], + }, + ) + ] self.assertRaises(DistutilsSetupError, cmd.check_extensions_list, exts) exts[0][1]['macros'] = [('1', '2'), ('3',)] @@ -337,8 +359,7 @@ class BuildExtTestCase(TempdirManager, c_file = os.path.join(tmp_dir, 'foo.c') self.write_file(c_file, 'void PyInit_foo(void) {}\n') ext = Extension('foo', [c_file], optional=False) - dist = Distribution({'name': 'xx', - 'ext_modules': [ext]}) + dist = Distribution({'name': 'xx', 'ext_modules': [ext]}) cmd = self.build_ext(dist) fixup_build_ext(cmd) cmd.ensure_finalized() @@ -398,9 +419,9 @@ class BuildExtTestCase(TempdirManager, def test_ext_fullpath(self): ext = sysconfig.get_config_var('EXT_SUFFIX') # building lxml.etree inplace - #etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c') - #etree_ext = Extension('lxml.etree', [etree_c]) - #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]}) + # etree_c = 
os.path.join(self.tmp_dir, 'lxml.etree.c') + # etree_ext = Extension('lxml.etree', [etree_c]) + # dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]}) dist = Distribution() cmd = self.build_ext(dist) cmd.inplace = 1 @@ -423,8 +444,7 @@ class BuildExtTestCase(TempdirManager, build_py.package_dir = {} cmd.distribution.packages = ['twisted', 'twisted.runner.portmap'] path = cmd.get_ext_fullpath('twisted.runner.portmap') - wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner', - 'portmap' + ext) + wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner', 'portmap' + ext) self.assertEqual(wanted, path) # building twisted.runner.portmap inplace @@ -433,7 +453,6 @@ class BuildExtTestCase(TempdirManager, wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext) self.assertEqual(wanted, path) - @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') def test_deployment_target_default(self): # Issue 9516: Test that, in the absence of the environment variable, @@ -445,8 +464,9 @@ class BuildExtTestCase(TempdirManager, def test_deployment_target_too_low(self): # Issue 9516: Test that an extension module is not allowed to be # compiled with a deployment target less than that of the interpreter. - self.assertRaises(DistutilsPlatformError, - self._try_compile_deployment_target, '>', '10.1') + self.assertRaises( + DistutilsPlatformError, self._try_compile_deployment_target, '>', '10.1' + ) @unittest.skipUnless(sys.platform == 'darwin', 'test only relevant for MacOSX') def test_deployment_target_higher_ok(self): @@ -475,7 +495,9 @@ class BuildExtTestCase(TempdirManager, deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c') with open(deptarget_c, 'w') as fp: - fp.write(textwrap.dedent('''\ + fp.write( + textwrap.dedent( + '''\ #include int dummy; @@ -485,7 +507,10 @@ class BuildExtTestCase(TempdirManager, #error "Unexpected target" #endif - ''' % operator)) + ''' + % operator + ) + ) # get the deployment target that the interpreter was built with target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') @@ -506,12 +531,9 @@ class BuildExtTestCase(TempdirManager, deptarget_ext = Extension( 'deptarget', [deptarget_c], - extra_compile_args=['-DTARGET=%s'%(target,)], + extra_compile_args=['-DTARGET=%s' % (target,)], ) - dist = Distribution({ - 'name': 'deptarget', - 'ext_modules': [deptarget_ext] - }) + dist = Distribution({'name': 'deptarget', 'ext_modules': [deptarget_ext]}) dist.package_dir = self.tmp_dir cmd = self.build_ext(dist) cmd.build_lib = self.tmp_dir @@ -533,7 +555,6 @@ class BuildExtTestCase(TempdirManager, class ParallelBuildExtTestCase(BuildExtTestCase): - def build_ext(self, *args, **kwargs): build_ext = super().build_ext(*args, **kwargs) build_ext.parallel = True @@ -546,5 +567,6 @@ def test_suite(): suite.addTest(unittest.TestLoader().loadTestsFromTestCase(ParallelBuildExtTestCase)) return suite + if __name__ == '__main__': support.run_unittest(__name__) diff --git a/setuptools/_distutils/tests/test_build_py.py b/setuptools/_distutils/tests/test_build_py.py index a590a48..4585d79 100644 --- a/setuptools/_distutils/tests/test_build_py.py +++ b/setuptools/_distutils/tests/test_build_py.py @@ -12,10 +12,9 @@ from distutils.tests import support from test.support import run_unittest -class BuildPyTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - +class BuildPyTestCase( + support.TempdirManager, support.LoggingSilencer, unittest.TestCase +): def test_package_data(self): sources = self.mkdtemp() f = 
open(os.path.join(sources, "__init__.py"), "w") @@ -31,13 +30,10 @@ class BuildPyTestCase(support.TempdirManager, destination = self.mkdtemp() - dist = Distribution({"packages": ["pkg"], - "package_dir": {"pkg": sources}}) + dist = Distribution({"packages": ["pkg"], "package_dir": {"pkg": sources}}) # script_name need not exist, it just need to be initialized dist.script_name = os.path.join(sources, "setup.py") - dist.command_obj["build"] = support.DummyCommand( - force=0, - build_lib=destination) + dist.command_obj["build"] = support.DummyCommand(force=0, build_lib=destination) dist.packages = ["pkg"] dist.package_data = {"pkg": ["README.txt"]} dist.package_dir = {"pkg": sources} @@ -62,8 +58,7 @@ class BuildPyTestCase(support.TempdirManager, self.assertFalse(os.path.exists(pycache_dir)) else: pyc_files = os.listdir(pycache_dir) - self.assertIn("__init__.%s.pyc" % sys.implementation.cache_tag, - pyc_files) + self.assertIn("__init__.%s.pyc" % sys.implementation.cache_tag, pyc_files) def test_empty_package_dir(self): # See bugs #1668596/#1720897 @@ -75,9 +70,13 @@ class BuildPyTestCase(support.TempdirManager, open(os.path.join(testdir, "testfile"), "w").close() os.chdir(sources) - dist = Distribution({"packages": ["pkg"], - "package_dir": {"pkg": ""}, - "package_data": {"pkg": ["doc/*"]}}) + dist = Distribution( + { + "packages": ["pkg"], + "package_dir": {"pkg": ""}, + "package_data": {"pkg": ["doc/*"]}, + } + ) # script_name need not exist, it just need to be initialized dist.script_name = os.path.join(sources, "setup.py") dist.script_args = ["build"] @@ -102,8 +101,7 @@ class BuildPyTestCase(support.TempdirManager, found = os.listdir(cmd.build_lib) self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py']) found = os.listdir(os.path.join(cmd.build_lib, '__pycache__')) - self.assertEqual(found, - ['boiledeggs.%s.pyc' % sys.implementation.cache_tag]) + self.assertEqual(found, ['boiledeggs.%s.pyc' % sys.implementation.cache_tag]) @unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled') def test_byte_compile_optimized(self): @@ -142,8 +140,7 @@ class BuildPyTestCase(support.TempdirManager, os.mkdir(os.path.join(docdir, 'otherdir')) os.chdir(sources) - dist = Distribution({"packages": ["pkg"], - "package_data": {"pkg": ["doc/*"]}}) + dist = Distribution({"packages": ["pkg"], "package_data": {"pkg": ["doc/*"]}}) # script_name need not exist, it just need to be initialized dist.script_name = os.path.join(sources, "setup.py") dist.script_args = ["build"] @@ -168,12 +165,12 @@ class BuildPyTestCase(support.TempdirManager, finally: sys.dont_write_bytecode = old_dont_write_bytecode - self.assertIn('byte-compiling is disabled', - self.logs[0][1] % self.logs[0][2]) + self.assertIn('byte-compiling is disabled', self.logs[0][1] % self.logs[0][2]) def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(BuildPyTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_build_scripts.py b/setuptools/_distutils/tests/test_build_scripts.py index f299e51..8c7061d 100644 --- a/setuptools/_distutils/tests/test_build_scripts.py +++ b/setuptools/_distutils/tests/test_build_scripts.py @@ -11,10 +11,9 @@ from distutils.tests import support from test.support import run_unittest -class BuildScriptsTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - +class BuildScriptsTestCase( + support.TempdirManager, support.LoggingSilencer, unittest.TestCase +): def test_default_settings(self): cmd = 
self.get_build_scripts_cmd("/foo/bar", []) self.assertFalse(cmd.force) @@ -30,9 +29,9 @@ class BuildScriptsTestCase(support.TempdirManager, target = self.mkdtemp() expected = self.write_sample_scripts(source) - cmd = self.get_build_scripts_cmd(target, - [os.path.join(source, fn) - for fn in expected]) + cmd = self.get_build_scripts_cmd( + target, [os.path.join(source, fn) for fn in expected] + ) cmd.finalize_options() cmd.run() @@ -42,32 +41,38 @@ class BuildScriptsTestCase(support.TempdirManager, def get_build_scripts_cmd(self, target, scripts): import sys + dist = Distribution() dist.scripts = scripts dist.command_obj["build"] = support.DummyCommand( - build_scripts=target, - force=1, - executable=sys.executable - ) + build_scripts=target, force=1, executable=sys.executable + ) return build_scripts(dist) def write_sample_scripts(self, dir): expected = [] expected.append("script1.py") - self.write_script(dir, "script1.py", - ("#! /usr/bin/env python2.3\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) + self.write_script( + dir, + "script1.py", + ( + "#! /usr/bin/env python2.3\n" + "# bogus script w/ Python sh-bang\n" + "pass\n" + ), + ) expected.append("script2.py") - self.write_script(dir, "script2.py", - ("#!/usr/bin/python\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) + self.write_script( + dir, + "script2.py", + ("#!/usr/bin/python\n" "# bogus script w/ Python sh-bang\n" "pass\n"), + ) expected.append("shell.sh") - self.write_script(dir, "shell.sh", - ("#!/bin/sh\n" - "# bogus shell script w/ sh-bang\n" - "exit 0\n")) + self.write_script( + dir, + "shell.sh", + ("#!/bin/sh\n" "# bogus shell script w/ sh-bang\n" "exit 0\n"), + ) return expected def write_script(self, dir, name, text): @@ -82,10 +87,9 @@ class BuildScriptsTestCase(support.TempdirManager, target = self.mkdtemp() expected = self.write_sample_scripts(source) - - cmd = self.get_build_scripts_cmd(target, - [os.path.join(source, fn) - for fn in expected]) + cmd = self.get_build_scripts_cmd( + target, [os.path.join(source, fn) for fn in expected] + ) cmd.finalize_options() # http://bugs.python.org/issue4524 @@ -105,8 +109,10 @@ class BuildScriptsTestCase(support.TempdirManager, for name in expected: self.assertIn(name, built) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(BuildScriptsTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_check.py b/setuptools/_distutils/tests/test_check.py index 2414d6e..424c5e0 100644 --- a/setuptools/_distutils/tests/test_check.py +++ b/setuptools/_distutils/tests/test_check.py @@ -17,10 +17,7 @@ except ImportError: HERE = os.path.dirname(__file__) -class CheckTestCase(support.LoggingSilencer, - support.TempdirManager, - unittest.TestCase): - +class CheckTestCase(support.LoggingSilencer, support.TempdirManager, unittest.TestCase): def _run(self, metadata=None, cwd=None, **options): if metadata is None: metadata = {} @@ -48,9 +45,13 @@ class CheckTestCase(support.LoggingSilencer, # now let's add the required fields # and run it again, to make sure we don't get # any warning anymore - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx'} + metadata = { + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + } cmd = self._run(metadata) self.assertEqual(cmd._warnings, 0) @@ -63,11 +64,15 @@ class CheckTestCase(support.LoggingSilencer, self.assertEqual(cmd._warnings, 0) # now a test with non-ASCII characters - 
metadata = {'url': 'xxx', 'author': '\u00c9ric', - 'author_email': 'xxx', 'name': 'xxx', - 'version': 'xxx', - 'description': 'Something about esszet \u00df', - 'long_description': 'More things about esszet \u00df'} + metadata = { + 'url': 'xxx', + 'author': '\u00c9ric', + 'author_email': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + 'description': 'Something about esszet \u00df', + 'long_description': 'More things about esszet \u00df', + } cmd = self._run(metadata) self.assertEqual(cmd._warnings, 0) @@ -75,9 +80,12 @@ class CheckTestCase(support.LoggingSilencer, for kind in ("author", "maintainer"): # ensure no warning when author_email or maintainer_email is given # (the spec allows these fields to take the form "Name ") - metadata = {'url': 'xxx', - kind + '_email': 'Name ', - 'name': 'xxx', 'version': 'xxx'} + metadata = { + 'url': 'xxx', + kind + '_email': 'Name ', + 'name': 'xxx', + 'version': 'xxx', + } cmd = self._run(metadata) self.assertEqual(cmd._warnings, 0) @@ -117,12 +125,20 @@ class CheckTestCase(support.LoggingSilencer, self.assertEqual(cmd._warnings, 1) # let's see if we have an error with strict=1 - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx', - 'long_description': broken_rest} - self.assertRaises(DistutilsSetupError, self._run, metadata, - **{'strict': 1, 'restructuredtext': 1}) + metadata = { + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + 'long_description': broken_rest, + } + self.assertRaises( + DistutilsSetupError, + self._run, + metadata, + **{'strict': 1, 'restructuredtext': 1} + ) # and non-broken rest, including a non-ASCII character to test #12114 metadata['long_description'] = 'title\n=====\n\ntest \u00df' @@ -139,22 +155,30 @@ class CheckTestCase(support.LoggingSilencer, # Don't fail if there is a `code` or `code-block` directive example_rst_docs = [] - example_rst_docs.append(textwrap.dedent("""\ + example_rst_docs.append( + textwrap.dedent( + """\ Here's some code: .. code:: python def foo(): pass - """)) - example_rst_docs.append(textwrap.dedent("""\ + """ + ) + ) + example_rst_docs.append( + textwrap.dedent( + """\ Here's some code: .. code-block:: python def foo(): pass - """)) + """ + ) + ) for rest_with_code in example_rst_docs: pkg_info, dist = self.create_dist(long_description=rest_with_code) @@ -166,19 +190,20 @@ class CheckTestCase(support.LoggingSilencer, else: self.assertEqual(len(msgs), 1) self.assertEqual( - str(msgs[0][1]), - 'Cannot analyze code. Pygments package not found.' + str(msgs[0][1]), 'Cannot analyze code. Pygments package not found.' 
) def test_check_all(self): metadata = {'url': 'xxx', 'author': 'xxx'} - self.assertRaises(DistutilsSetupError, self._run, - {}, **{'strict': 1, - 'restructuredtext': 1}) + self.assertRaises( + DistutilsSetupError, self._run, {}, **{'strict': 1, 'restructuredtext': 1} + ) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(CheckTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_clean.py b/setuptools/_distutils/tests/test_clean.py index 9236749..92e58f7 100644 --- a/setuptools/_distutils/tests/test_clean.py +++ b/setuptools/_distutils/tests/test_clean.py @@ -6,18 +6,23 @@ from distutils.command.clean import clean from distutils.tests import support from test.support import run_unittest -class cleanTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): +class cleanTestCase(support.TempdirManager, support.LoggingSilencer, unittest.TestCase): def test_simple_run(self): pkg_dir, dist = self.create_dist() cmd = clean(dist) # let's add some elements clean should remove - dirs = [(d, os.path.join(pkg_dir, d)) - for d in ('build_temp', 'build_lib', 'bdist_base', - 'build_scripts', 'build_base')] + dirs = [ + (d, os.path.join(pkg_dir, d)) + for d in ( + 'build_temp', + 'build_lib', + 'bdist_base', + 'build_scripts', + 'build_base', + ) + ] for name, path in dirs: os.mkdir(path) @@ -34,16 +39,17 @@ class cleanTestCase(support.TempdirManager, # make sure the files where removed for name, path in dirs: - self.assertFalse(os.path.exists(path), - '%s was not removed' % path) + self.assertFalse(os.path.exists(path), '%s was not removed' % path) # let's run the command again (should spit warnings but succeed) cmd.all = 1 cmd.ensure_finalized() cmd.run() + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(cleanTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_cmd.py b/setuptools/_distutils/tests/test_cmd.py index 2319214..12a8a20 100644 --- a/setuptools/_distutils/tests/test_cmd.py +++ b/setuptools/_distutils/tests/test_cmd.py @@ -8,12 +8,13 @@ from distutils.dist import Distribution from distutils.errors import DistutilsOptionError from distutils import debug + class MyCmd(Command): def initialize_options(self): pass -class CommandTestCase(unittest.TestCase): +class CommandTestCase(unittest.TestCase): def setUp(self): dist = Distribution() self.cmd = MyCmd(dist) @@ -28,11 +29,13 @@ class CommandTestCase(unittest.TestCase): cmd.ensure_string_list('yes_string_list') cmd.ensure_string_list('yes_string_list2') - self.assertRaises(DistutilsOptionError, - cmd.ensure_string_list, 'not_string_list') + self.assertRaises( + DistutilsOptionError, cmd.ensure_string_list, 'not_string_list' + ) - self.assertRaises(DistutilsOptionError, - cmd.ensure_string_list, 'not_string_list2') + self.assertRaises( + DistutilsOptionError, cmd.ensure_string_list, 'not_string_list2' + ) cmd.option1 = 'ok,dok' cmd.ensure_string_list('option1') @@ -42,21 +45,21 @@ class CommandTestCase(unittest.TestCase): cmd.ensure_string_list('option2') cmd.option3 = ['ok', 2] - self.assertRaises(DistutilsOptionError, cmd.ensure_string_list, - 'option3') - + self.assertRaises(DistutilsOptionError, cmd.ensure_string_list, 'option3') def test_make_file(self): cmd = self.cmd # making sure it raises when infiles is not a string or a list/tuple - self.assertRaises(TypeError, cmd.make_file, - infiles=1, outfile='', func='func', args=()) + self.assertRaises( + TypeError, 
cmd.make_file, infiles=1, outfile='', func='func', args=() + ) # making sure execute gets called properly def _execute(func, args, exec_msg, level): self.assertEqual(exec_msg, 'generating out from in') + cmd.force = True cmd.execute = _execute cmd.make_file(infiles='in', outfile='out', func='func', args=()) @@ -64,8 +67,10 @@ class CommandTestCase(unittest.TestCase): def test_dump_options(self): msgs = [] + def _announce(msg, level): msgs.append(msg) + cmd = self.cmd cmd.announce = _announce cmd.option1 = 1 @@ -73,8 +78,7 @@ class CommandTestCase(unittest.TestCase): cmd.user_options = [('option1', '', ''), ('option2', '', '')] cmd.dump_options() - wanted = ["command options for 'MyCmd':", ' option1 = 1', - ' option2 = 1'] + wanted = ["command options for 'MyCmd':", ' option1 = 1', ' option2 = 1'] self.assertEqual(msgs, wanted) def test_ensure_string(self): @@ -119,8 +123,10 @@ class CommandTestCase(unittest.TestCase): finally: debug.DEBUG = False + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(CommandTestCase) + if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_config.py b/setuptools/_distutils/tests/test_config.py index 27bd9d4..a4b4850 100644 --- a/setuptools/_distutils/tests/test_config.py +++ b/setuptools/_distutils/tests/test_config.py @@ -50,11 +50,12 @@ password:xxx """ -class BasePyPIRCCommandTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - +class BasePyPIRCCommandTestCase( + support.TempdirManager, + support.LoggingSilencer, + support.EnvironGuard, + unittest.TestCase, +): def setUp(self): """Patches the environment.""" super(BasePyPIRCCommandTestCase, self).setUp() @@ -67,8 +68,10 @@ class BasePyPIRCCommandTestCase(support.TempdirManager, class command(PyPIRCCommand): def __init__(self, dist): super().__init__(dist) + def initialize_options(self): pass + finalize_options = initialize_options self._cmd = command @@ -81,7 +84,6 @@ class BasePyPIRCCommandTestCase(support.TempdirManager, class PyPIRCCommandTestCase(BasePyPIRCCommandTestCase): - def test_server_registration(self): # This test makes sure PyPIRCCommand knows how to: # 1. 
handle several sections in .pypirc @@ -93,18 +95,26 @@ class PyPIRCCommandTestCase(BasePyPIRCCommandTestCase): config = cmd._read_pypirc() config = list(sorted(config.items())) - waited = [('password', 'secret'), ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/'), - ('server', 'server1'), ('username', 'me')] + waited = [ + ('password', 'secret'), + ('realm', 'pypi'), + ('repository', 'https://upload.pypi.org/legacy/'), + ('server', 'server1'), + ('username', 'me'), + ] self.assertEqual(config, waited) # old format self.write_file(self.rc, PYPIRC_OLD) config = cmd._read_pypirc() config = list(sorted(config.items())) - waited = [('password', 'secret'), ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/'), - ('server', 'server-login'), ('username', 'tarek')] + waited = [ + ('password', 'secret'), + ('realm', 'pypi'), + ('repository', 'https://upload.pypi.org/legacy/'), + ('server', 'server-login'), + ('username', 'tarek'), + ] self.assertEqual(config, waited) def test_server_empty_registration(self): @@ -128,14 +138,19 @@ class PyPIRCCommandTestCase(BasePyPIRCCommandTestCase): config = cmd._read_pypirc() config = list(sorted(config.items())) - waited = [('password', 'yh^%#rest-of-my-password'), ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/'), - ('server', 'server3'), ('username', 'cbiggles')] + waited = [ + ('password', 'yh^%#rest-of-my-password'), + ('realm', 'pypi'), + ('repository', 'https://upload.pypi.org/legacy/'), + ('server', 'server3'), + ('username', 'cbiggles'), + ] self.assertEqual(config, waited) def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(PyPIRCCommandTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_config_cmd.py b/setuptools/_distutils/tests/test_config_cmd.py index 2c84719..0c1a9d2 100644 --- a/setuptools/_distutils/tests/test_config_cmd.py +++ b/setuptools/_distutils/tests/test_config_cmd.py @@ -2,18 +2,16 @@ import unittest import os import sys -from test.support import run_unittest - -from .py35compat import missing_compiler_executable +from test.support import run_unittest, missing_compiler_executable from distutils.command.config import dump_file, config from distutils.tests import support from distutils import log -class ConfigTestCase(support.LoggingSilencer, - support.TempdirManager, - unittest.TestCase): +class ConfigTestCase( + support.LoggingSilencer, support.TempdirManager, unittest.TestCase +): def _info(self, msg, *args): for line in msg.splitlines(): self._logs.append(line) @@ -37,7 +35,7 @@ class ConfigTestCase(support.LoggingSilencer, f.close() dump_file(this_file, 'I am the header') - self.assertEqual(len(self._logs), numlines+1) + self.assertEqual(len(self._logs), numlines + 1) @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") def test_search_cpp(self): @@ -49,7 +47,9 @@ class ConfigTestCase(support.LoggingSilencer, cmd._check_compiler() compiler = cmd.compiler if sys.platform[:3] == "aix" and "xlc" in compiler.preprocessor[0].lower(): - self.skipTest('xlc: The -E option overrides the -P, -o, and -qsyntaxonly options') + self.skipTest( + 'xlc: The -E option overrides the -P, -o, and -qsyntaxonly options' + ) # simple pattern searches match = cmd.search_cpp(pattern='xxx', body='/* xxx */') @@ -91,8 +91,10 @@ class ConfigTestCase(support.LoggingSilencer, for f in (f1, f2): self.assertFalse(os.path.exists(f)) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(ConfigTestCase) + 
if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_core.py b/setuptools/_distutils/tests/test_core.py index 7270d69..23402fb 100644 --- a/setuptools/_distutils/tests/test_core.py +++ b/setuptools/_distutils/tests/test_core.py @@ -56,8 +56,8 @@ if __name__ == "__main__": main() """ -class CoreTestCase(support.EnvironGuard, unittest.TestCase): +class CoreTestCase(support.EnvironGuard, unittest.TestCase): def setUp(self): super(CoreTestCase, self).setUp() self.old_stdout = sys.stdout @@ -90,21 +90,18 @@ class CoreTestCase(support.EnvironGuard, unittest.TestCase): def test_run_setup_provides_file(self): # Make sure the script can use __file__; if that's missing, the test # setup.py script will raise NameError. - distutils.core.run_setup( - self.write_setup(setup_using___file__)) + distutils.core.run_setup(self.write_setup(setup_using___file__)) def test_run_setup_preserves_sys_argv(self): # Make sure run_setup does not clobber sys.argv argv_copy = sys.argv.copy() - distutils.core.run_setup( - self.write_setup(setup_does_nothing)) + distutils.core.run_setup(self.write_setup(setup_does_nothing)) self.assertEqual(sys.argv, argv_copy) def test_run_setup_defines_subclass(self): # Make sure the script can use __file__; if that's missing, the test # setup.py script will raise NameError. - dist = distutils.core.run_setup( - self.write_setup(setup_defines_subclass)) + dist = distutils.core.run_setup(self.write_setup(setup_defines_subclass)) install = dist.get_command_obj('install') self.assertIn('cmd', install.sub_commands) @@ -118,8 +115,7 @@ class CoreTestCase(support.EnvironGuard, unittest.TestCase): # Create a directory and write the setup.py file there: os.mkdir(os_helper.TESTFN) setup_py = os.path.join(os_helper.TESTFN, "setup.py") - distutils.core.run_setup( - self.write_setup(setup_prints_cwd, path=setup_py)) + distutils.core.run_setup(self.write_setup(setup_prints_cwd, path=setup_py)) output = sys.stdout.getvalue() if output.endswith("\n"): @@ -128,14 +124,16 @@ class CoreTestCase(support.EnvironGuard, unittest.TestCase): def test_run_setup_within_if_main(self): dist = distutils.core.run_setup( - self.write_setup(setup_within_if_main), stop_after="config") + self.write_setup(setup_within_if_main), stop_after="config" + ) self.assertIsInstance(dist, Distribution) self.assertEqual(dist.get_name(), "setup_within_if_main") def test_run_commands(self): sys.argv = ['setup.py', 'build'] dist = distutils.core.run_setup( - self.write_setup(setup_within_if_main), stop_after="commandline") + self.write_setup(setup_within_if_main), stop_after="commandline" + ) self.assertNotIn('build', dist.have_run) distutils.core.run_commands(dist) self.assertIn('build', dist.have_run) @@ -158,8 +156,10 @@ class CoreTestCase(support.EnvironGuard, unittest.TestCase): wanted = "options (after parsing config files):\n" self.assertEqual(stdout.readlines()[0], wanted) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(CoreTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_cygwinccompiler.py b/setuptools/_distutils/tests/test_cygwinccompiler.py index 8715a53..b3c164e 100644 --- a/setuptools/_distutils/tests/test_cygwinccompiler.py +++ b/setuptools/_distutils/tests/test_cygwinccompiler.py @@ -4,39 +4,58 @@ import sys import os from test.support import run_unittest -from distutils.cygwinccompiler import (check_config_h, - CONFIG_H_OK, CONFIG_H_NOTOK, - CONFIG_H_UNCERTAIN, - get_msvcr) +from 
distutils.cygwinccompiler import ( + check_config_h, + CONFIG_H_OK, + CONFIG_H_NOTOK, + CONFIG_H_UNCERTAIN, + get_msvcr, +) from distutils.tests import support -class CygwinCCompilerTestCase(support.TempdirManager, - unittest.TestCase): - +class CygwinCCompilerTestCase(support.TempdirManager, unittest.TestCase): def setUp(self): super(CygwinCCompilerTestCase, self).setUp() self.version = sys.version self.python_h = os.path.join(self.mkdtemp(), 'python.h') from distutils import sysconfig + self.old_get_config_h_filename = sysconfig.get_config_h_filename sysconfig.get_config_h_filename = self._get_config_h_filename def tearDown(self): sys.version = self.version from distutils import sysconfig + sysconfig.get_config_h_filename = self.old_get_config_h_filename super(CygwinCCompilerTestCase, self).tearDown() def _get_config_h_filename(self): return self.python_h + @unittest.skipIf(sys.platform != "cygwin", "Not running on Cygwin") + @unittest.skipIf( + not os.path.exists("/usr/lib/libbash.dll.a"), "Don't know a linkable library" + ) + def test_find_library_file(self): + from distutils.cygwinccompiler import CygwinCCompiler + + compiler = CygwinCCompiler() + link_name = "bash" + linkable_file = compiler.find_library_file(["/usr/lib"], link_name) + self.assertIsNotNone(linkable_file) + self.assertTrue(os.path.exists(linkable_file)) + self.assertEquals(linkable_file, "/usr/lib/lib{:s}.dll.a".format(link_name)) + def test_check_config_h(self): # check_config_h looks for "GCC" in sys.version first # returns CONFIG_H_OK if found - sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC ' - '4.0.1 (Apple Computer, Inc. build 5370)]') + sys.version = ( + '2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC ' + '4.0.1 (Apple Computer, Inc. build 5370)]' + ) self.assertEqual(check_config_h()[0], CONFIG_H_OK) @@ -57,40 +76,49 @@ class CygwinCCompilerTestCase(support.TempdirManager, def test_get_msvcr(self): # none - sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) ' - '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]') + sys.version = ( + '2.6.1 (r261:67515, Dec 6 2008, 16:42:21) ' + '\n[GCC 4.0.1 (Apple Computer, Inc. 
build 5370)]' + ) self.assertEqual(get_msvcr(), None) # MSVC 7.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1300 32 bits (Intel)]') + sys.version = ( + '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1300 32 bits (Intel)]' + ) self.assertEqual(get_msvcr(), ['msvcr70']) # MSVC 7.1 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1310 32 bits (Intel)]') + sys.version = ( + '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1310 32 bits (Intel)]' + ) self.assertEqual(get_msvcr(), ['msvcr71']) # VS2005 / MSVC 8.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1400 32 bits (Intel)]') + sys.version = ( + '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1400 32 bits (Intel)]' + ) self.assertEqual(get_msvcr(), ['msvcr80']) # VS2008 / MSVC 9.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1500 32 bits (Intel)]') + sys.version = ( + '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.1500 32 bits (Intel)]' + ) self.assertEqual(get_msvcr(), ['msvcr90']) - + sys.version = '3.10.0 (tags/v3.10.0:b494f59, Oct 4 2021, 18:46:30) [MSC v.1929 32 bit (Intel)]' self.assertEqual(get_msvcr(), ['ucrt', 'vcruntime140']) # unknown - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.2000 32 bits (Intel)]') + sys.version = ( + '2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' '[MSC v.2000 32 bits (Intel)]' + ) self.assertRaises(ValueError, get_msvcr) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(CygwinCCompilerTestCase) + if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_dep_util.py b/setuptools/_distutils/tests/test_dep_util.py index 0d52740..89ae05d 100644 --- a/setuptools/_distutils/tests/test_dep_util.py +++ b/setuptools/_distutils/tests/test_dep_util.py @@ -7,8 +7,8 @@ from distutils.errors import DistutilsFileError from distutils.tests import support from test.support import run_unittest -class DepUtilTestCase(support.TempdirManager, unittest.TestCase): +class DepUtilTestCase(support.TempdirManager, unittest.TestCase): def test_newer(self): tmpdir = self.mkdtemp() @@ -36,14 +36,13 @@ class DepUtilTestCase(support.TempdirManager, unittest.TestCase): os.mkdir(targets) one = os.path.join(sources, 'one') two = os.path.join(sources, 'two') - three = os.path.abspath(__file__) # I am the old file + three = os.path.abspath(__file__) # I am the old file four = os.path.join(targets, 'four') self.write_file(one) self.write_file(two) self.write_file(four) - self.assertEqual(newer_pairwise([one, two], [three, four]), - ([one],[three])) + self.assertEqual(newer_pairwise([one, two], [three, four]), ([one], [three])) def test_newer_group(self): tmpdir = self.mkdtemp() @@ -66,15 +65,14 @@ class DepUtilTestCase(support.TempdirManager, unittest.TestCase): os.remove(one) self.assertRaises(OSError, newer_group, [one, two, old_file], three) - self.assertFalse(newer_group([one, two, old_file], three, - missing='ignore')) + self.assertFalse(newer_group([one, two, old_file], three, missing='ignore')) - self.assertTrue(newer_group([one, two, old_file], three, - missing='newer')) + self.assertTrue(newer_group([one, two, old_file], three, missing='newer')) def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(DepUtilTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_dir_util.py b/setuptools/_distutils/tests/test_dir_util.py index 1b1f3bb..a1f9a24 100644 --- 
a/setuptools/_distutils/tests/test_dir_util.py +++ b/setuptools/_distutils/tests/test_dir_util.py @@ -6,8 +6,13 @@ import sys from unittest.mock import patch from distutils import dir_util, errors -from distutils.dir_util import (mkpath, remove_tree, create_tree, copy_tree, - ensure_relative) +from distutils.dir_util import ( + mkpath, + remove_tree, + create_tree, + copy_tree, + ensure_relative, +) from distutils import log from distutils.tests import support @@ -15,7 +20,6 @@ from test.support import run_unittest class DirUtilTestCase(support.TempdirManager, unittest.TestCase): - def _log(self, msg, *args): if len(args) > 0: self._logs.append(msg % args) @@ -44,8 +48,7 @@ class DirUtilTestCase(support.TempdirManager, unittest.TestCase): remove_tree(self.root_target, verbose=0) mkpath(self.target, verbose=1) - wanted = ['creating %s' % self.root_target, - 'creating %s' % self.target] + wanted = ['creating %s' % self.root_target, 'creating %s' % self.target] self.assertEqual(self._logs, wanted) self._logs = [] @@ -53,18 +56,18 @@ class DirUtilTestCase(support.TempdirManager, unittest.TestCase): wanted = ["removing '%s' (and everything under it)" % self.root_target] self.assertEqual(self._logs, wanted) - @unittest.skipIf(sys.platform.startswith('win'), - "This test is only appropriate for POSIX-like systems.") + @unittest.skipIf( + sys.platform.startswith('win'), + "This test is only appropriate for POSIX-like systems.", + ) def test_mkpath_with_custom_mode(self): # Get and set the current umask value for testing mode bits. umask = os.umask(0o002) os.umask(umask) mkpath(self.target, 0o700) - self.assertEqual( - stat.S_IMODE(os.stat(self.target).st_mode), 0o700 & ~umask) + self.assertEqual(stat.S_IMODE(os.stat(self.target).st_mode), 0o700 & ~umask) mkpath(self.target2, 0o555) - self.assertEqual( - stat.S_IMODE(os.stat(self.target2).st_mode), 0o555 & ~umask) + self.assertEqual(stat.S_IMODE(os.stat(self.target2).st_mode), 0o555 & ~umask) def test_create_tree_verbosity(self): @@ -118,7 +121,7 @@ class DirUtilTestCase(support.TempdirManager, unittest.TestCase): if os.sep == '/': self.assertEqual(ensure_relative('/home/foo'), 'home/foo') self.assertEqual(ensure_relative('some/path'), 'some/path') - else: # \\ + else: # \\ self.assertEqual(ensure_relative('c:\\home\\foo'), 'c:home\\foo') self.assertEqual(ensure_relative('home\\foo'), 'home\\foo') @@ -126,8 +129,9 @@ class DirUtilTestCase(support.TempdirManager, unittest.TestCase): """ An exception in listdir should raise a DistutilsFileError """ - with patch("os.listdir", side_effect=OSError()), \ - self.assertRaises(errors.DistutilsFileError): + with patch("os.listdir", side_effect=OSError()), self.assertRaises( + errors.DistutilsFileError + ): src = self.tempdirs[-1] dir_util.copy_tree(src, None) @@ -135,5 +139,6 @@ class DirUtilTestCase(support.TempdirManager, unittest.TestCase): def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(DirUtilTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_dist.py b/setuptools/_distutils/tests/test_dist.py index 9132bc0..6520a46 100644 --- a/setuptools/_distutils/tests/test_dist.py +++ b/setuptools/_distutils/tests/test_dist.py @@ -11,9 +11,7 @@ from unittest import mock from distutils.dist import Distribution, fix_help_options from distutils.cmd import Command -from test.support import ( - captured_stdout, captured_stderr, run_unittest -) +from test.support import captured_stdout, captured_stderr, run_unittest from .py38compat import 
TESTFN from distutils.tests import support from distutils import log @@ -42,11 +40,12 @@ class TestDistribution(Distribution): return self._config_files -class DistributionTestCase(support.LoggingSilencer, - support.TempdirManager, - support.EnvironGuard, - unittest.TestCase): - +class DistributionTestCase( + support.LoggingSilencer, + support.TempdirManager, + support.EnvironGuard, + unittest.TestCase, +): def setUp(self): super(DistributionTestCase, self).setUp() self.argv = sys.argv, sys.argv[:] @@ -71,15 +70,21 @@ class DistributionTestCase(support.LoggingSilencer, def test_command_packages_cmdline(self): from distutils.tests.test_dist import test_dist - sys.argv.extend(["--command-packages", - "foo.bar,distutils.tests", - "test_dist", - "-Ssometext", - ]) + + sys.argv.extend( + [ + "--command-packages", + "foo.bar,distutils.tests", + "test_dist", + "-Ssometext", + ] + ) d = self.create_distribution() # let's actually try to load our test command: - self.assertEqual(d.get_command_packages(), - ["distutils.command", "foo.bar", "distutils.tests"]) + self.assertEqual( + d.get_command_packages(), + ["distutils.command", "foo.bar", "distutils.tests"], + ) cmd = d.get_command_obj("test_dist") self.assertIsInstance(cmd, test_dist) self.assertEqual(cmd.sample_option, "sometext") @@ -95,20 +100,25 @@ class DistributionTestCase(support.LoggingSilencer, fakepath = '/somedir' with open(TESTFN, "w") as f: - print(("[install]\n" - "install-base = {0}\n" - "install-platbase = {0}\n" - "install-lib = {0}\n" - "install-platlib = {0}\n" - "install-purelib = {0}\n" - "install-headers = {0}\n" - "install-scripts = {0}\n" - "install-data = {0}\n" - "prefix = {0}\n" - "exec-prefix = {0}\n" - "home = {0}\n" - "user = {0}\n" - "root = {0}").format(fakepath), file=f) + print( + ( + "[install]\n" + "install-base = {0}\n" + "install-platbase = {0}\n" + "install-lib = {0}\n" + "install-platlib = {0}\n" + "install-purelib = {0}\n" + "install-headers = {0}\n" + "install-scripts = {0}\n" + "install-data = {0}\n" + "prefix = {0}\n" + "exec-prefix = {0}\n" + "home = {0}\n" + "user = {0}\n" + "root = {0}" + ).format(fakepath), + file=f, + ) # Base case: Not in a Virtual Environment with mock.patch.multiple(sys, prefix='/a', base_prefix='/a') as values: @@ -133,8 +143,8 @@ class DistributionTestCase(support.LoggingSilencer, } self.assertEqual( - sorted(d.command_options.get('install').keys()), - sorted(result_dict.keys())) + sorted(d.command_options.get('install').keys()), sorted(result_dict.keys()) + ) for (key, value) in d.command_options.get('install').items(): self.assertEqual(value, result_dict[key]) @@ -157,14 +167,14 @@ class DistributionTestCase(support.LoggingSilencer, f.close() d = self.create_distribution([TESTFN]) - self.assertEqual(d.get_command_packages(), - ["distutils.command", "foo.bar", "splat"]) + self.assertEqual( + d.get_command_packages(), ["distutils.command", "foo.bar", "splat"] + ) # ensure command line overrides config: sys.argv[1:] = ["--command-packages", "spork", "build"] d = self.create_distribution([TESTFN]) - self.assertEqual(d.get_command_packages(), - ["distutils.command", "spork"]) + self.assertEqual(d.get_command_packages(), ["distutils.command", "spork"]) # Setting --command-packages to '' should cause the default to # be used even if a config file specified something else: @@ -184,16 +194,21 @@ class DistributionTestCase(support.LoggingSilencer, self.addCleanup(setattr, warnings, 'warn', warnings.warn) warnings.warn = _warn - dist = Distribution(attrs={'author': 'xxx', 'name': 
'xxx', - 'version': 'xxx', 'url': 'xxxx', - 'options': {}}) + dist = Distribution( + attrs={ + 'author': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + 'url': 'xxxx', + 'options': {}, + } + ) self.assertEqual(len(warns), 0) self.assertNotIn('options', dir(dist)) def test_finalize_options(self): - attrs = {'keywords': 'one,two', - 'platforms': 'one,two'} + attrs = {'keywords': 'one,two', 'platforms': 'one,two'} dist = Distribution(attrs=attrs) dist.finalize_options() @@ -202,8 +217,7 @@ class DistributionTestCase(support.LoggingSilencer, self.assertEqual(dist.metadata.platforms, ['one', 'two']) self.assertEqual(dist.metadata.keywords, ['one', 'two']) - attrs = {'keywords': 'foo bar', - 'platforms': 'foo bar'} + attrs = {'keywords': 'foo bar', 'platforms': 'foo bar'} dist = Distribution(attrs=attrs) dist.finalize_options() self.assertEqual(dist.metadata.platforms, ['foo bar']) @@ -214,8 +228,7 @@ class DistributionTestCase(support.LoggingSilencer, self.assertEqual(dist.command_packages, None) cmds = dist.get_command_packages() self.assertEqual(cmds, ['distutils.command']) - self.assertEqual(dist.command_packages, - ['distutils.command']) + self.assertEqual(dist.command_packages, ['distutils.command']) dist.command_packages = 'one,two' cmds = dist.get_command_packages() @@ -228,7 +241,6 @@ class DistributionTestCase(support.LoggingSilencer, kwargs = {'level': 'ok2'} self.assertRaises(ValueError, dist.announce, args, kwargs) - def test_find_config_files_disable(self): # Ticket #1180: Allow user to disable their home config file. temp_home = self.mkdtemp() @@ -255,11 +267,10 @@ class DistributionTestCase(support.LoggingSilencer, os.path.expanduser = old_expander # make sure --no-user-cfg disables the user cfg file - self.assertEqual(len(all_files)-1, len(files)) + self.assertEqual(len(all_files) - 1, len(files)) -class MetadataTestCase(support.TempdirManager, support.EnvironGuard, - unittest.TestCase): +class MetadataTestCase(support.TempdirManager, support.EnvironGuard, unittest.TestCase): def setUp(self): super(MetadataTestCase, self).setUp() self.argv = sys.argv, sys.argv[:] @@ -275,8 +286,7 @@ class MetadataTestCase(support.TempdirManager, support.EnvironGuard, return sio.getvalue() def test_simple_metadata(self): - attrs = {"name": "package", - "version": "1.0"} + attrs = {"name": "package", "version": "1.0"} dist = Distribution(attrs) meta = self.format_metadata(dist) self.assertIn("Metadata-Version: 1.0", meta) @@ -285,34 +295,35 @@ class MetadataTestCase(support.TempdirManager, support.EnvironGuard, self.assertNotIn("obsoletes:", meta.lower()) def test_provides(self): - attrs = {"name": "package", - "version": "1.0", - "provides": ["package", "package.sub"]} + attrs = { + "name": "package", + "version": "1.0", + "provides": ["package", "package.sub"], + } dist = Distribution(attrs) - self.assertEqual(dist.metadata.get_provides(), - ["package", "package.sub"]) - self.assertEqual(dist.get_provides(), - ["package", "package.sub"]) + self.assertEqual(dist.metadata.get_provides(), ["package", "package.sub"]) + self.assertEqual(dist.get_provides(), ["package", "package.sub"]) meta = self.format_metadata(dist) self.assertIn("Metadata-Version: 1.1", meta) self.assertNotIn("requires:", meta.lower()) self.assertNotIn("obsoletes:", meta.lower()) def test_provides_illegal(self): - self.assertRaises(ValueError, Distribution, - {"name": "package", - "version": "1.0", - "provides": ["my.pkg (splat)"]}) + self.assertRaises( + ValueError, + Distribution, + {"name": "package", "version": "1.0", 
"provides": ["my.pkg (splat)"]}, + ) def test_requires(self): - attrs = {"name": "package", - "version": "1.0", - "requires": ["other", "another (==1.0)"]} + attrs = { + "name": "package", + "version": "1.0", + "requires": ["other", "another (==1.0)"], + } dist = Distribution(attrs) - self.assertEqual(dist.metadata.get_requires(), - ["other", "another (==1.0)"]) - self.assertEqual(dist.get_requires(), - ["other", "another (==1.0)"]) + self.assertEqual(dist.metadata.get_requires(), ["other", "another (==1.0)"]) + self.assertEqual(dist.get_requires(), ["other", "another (==1.0)"]) meta = self.format_metadata(dist) self.assertIn("Metadata-Version: 1.1", meta) self.assertNotIn("provides:", meta.lower()) @@ -321,27 +332,26 @@ class MetadataTestCase(support.TempdirManager, support.EnvironGuard, self.assertNotIn("obsoletes:", meta.lower()) def test_requires_illegal(self): - self.assertRaises(ValueError, Distribution, - {"name": "package", - "version": "1.0", - "requires": ["my.pkg (splat)"]}) + self.assertRaises( + ValueError, + Distribution, + {"name": "package", "version": "1.0", "requires": ["my.pkg (splat)"]}, + ) def test_requires_to_list(self): - attrs = {"name": "package", - "requires": iter(["other"])} + attrs = {"name": "package", "requires": iter(["other"])} dist = Distribution(attrs) self.assertIsInstance(dist.metadata.requires, list) - def test_obsoletes(self): - attrs = {"name": "package", - "version": "1.0", - "obsoletes": ["other", "another (<1.0)"]} + attrs = { + "name": "package", + "version": "1.0", + "obsoletes": ["other", "another (<1.0)"], + } dist = Distribution(attrs) - self.assertEqual(dist.metadata.get_obsoletes(), - ["other", "another (<1.0)"]) - self.assertEqual(dist.get_obsoletes(), - ["other", "another (<1.0)"]) + self.assertEqual(dist.metadata.get_obsoletes(), ["other", "another (<1.0)"]) + self.assertEqual(dist.get_obsoletes(), ["other", "another (<1.0)"]) meta = self.format_metadata(dist) self.assertIn("Metadata-Version: 1.1", meta) self.assertNotIn("provides:", meta.lower()) @@ -350,48 +360,59 @@ class MetadataTestCase(support.TempdirManager, support.EnvironGuard, self.assertIn("Obsoletes: another (<1.0)", meta) def test_obsoletes_illegal(self): - self.assertRaises(ValueError, Distribution, - {"name": "package", - "version": "1.0", - "obsoletes": ["my.pkg (splat)"]}) + self.assertRaises( + ValueError, + Distribution, + {"name": "package", "version": "1.0", "obsoletes": ["my.pkg (splat)"]}, + ) def test_obsoletes_to_list(self): - attrs = {"name": "package", - "obsoletes": iter(["other"])} + attrs = {"name": "package", "obsoletes": iter(["other"])} dist = Distribution(attrs) self.assertIsInstance(dist.metadata.obsoletes, list) def test_classifier(self): - attrs = {'name': 'Boa', 'version': '3.0', - 'classifiers': ['Programming Language :: Python :: 3']} + attrs = { + 'name': 'Boa', + 'version': '3.0', + 'classifiers': ['Programming Language :: Python :: 3'], + } dist = Distribution(attrs) - self.assertEqual(dist.get_classifiers(), - ['Programming Language :: Python :: 3']) + self.assertEqual( + dist.get_classifiers(), ['Programming Language :: Python :: 3'] + ) meta = self.format_metadata(dist) self.assertIn('Metadata-Version: 1.1', meta) def test_classifier_invalid_type(self): - attrs = {'name': 'Boa', 'version': '3.0', - 'classifiers': ('Programming Language :: Python :: 3',)} + attrs = { + 'name': 'Boa', + 'version': '3.0', + 'classifiers': ('Programming Language :: Python :: 3',), + } with captured_stderr() as error: d = Distribution(attrs) # should have warning 
about passing a non-list self.assertIn('should be a list', error.getvalue()) # should be converted to a list self.assertIsInstance(d.metadata.classifiers, list) - self.assertEqual(d.metadata.classifiers, - list(attrs['classifiers'])) + self.assertEqual(d.metadata.classifiers, list(attrs['classifiers'])) def test_keywords(self): - attrs = {'name': 'Monty', 'version': '1.0', - 'keywords': ['spam', 'eggs', 'life of brian']} + attrs = { + 'name': 'Monty', + 'version': '1.0', + 'keywords': ['spam', 'eggs', 'life of brian'], + } dist = Distribution(attrs) - self.assertEqual(dist.get_keywords(), - ['spam', 'eggs', 'life of brian']) + self.assertEqual(dist.get_keywords(), ['spam', 'eggs', 'life of brian']) def test_keywords_invalid_type(self): - attrs = {'name': 'Monty', 'version': '1.0', - 'keywords': ('spam', 'eggs', 'life of brian')} + attrs = { + 'name': 'Monty', + 'version': '1.0', + 'keywords': ('spam', 'eggs', 'life of brian'), + } with captured_stderr() as error: d = Distribution(attrs) # should have warning about passing a non-list @@ -401,15 +422,20 @@ class MetadataTestCase(support.TempdirManager, support.EnvironGuard, self.assertEqual(d.metadata.keywords, list(attrs['keywords'])) def test_platforms(self): - attrs = {'name': 'Monty', 'version': '1.0', - 'platforms': ['GNU/Linux', 'Some Evil Platform']} + attrs = { + 'name': 'Monty', + 'version': '1.0', + 'platforms': ['GNU/Linux', 'Some Evil Platform'], + } dist = Distribution(attrs) - self.assertEqual(dist.get_platforms(), - ['GNU/Linux', 'Some Evil Platform']) + self.assertEqual(dist.get_platforms(), ['GNU/Linux', 'Some Evil Platform']) def test_platforms_invalid_types(self): - attrs = {'name': 'Monty', 'version': '1.0', - 'platforms': ('GNU/Linux', 'Some Evil Platform')} + attrs = { + 'name': 'Monty', + 'version': '1.0', + 'platforms': ('GNU/Linux', 'Some Evil Platform'), + } with captured_stderr() as error: d = Distribution(attrs) # should have warning about passing a non-list @@ -419,21 +445,24 @@ class MetadataTestCase(support.TempdirManager, support.EnvironGuard, self.assertEqual(d.metadata.platforms, list(attrs['platforms'])) def test_download_url(self): - attrs = {'name': 'Boa', 'version': '3.0', - 'download_url': 'http://example.org/boa'} + attrs = { + 'name': 'Boa', + 'version': '3.0', + 'download_url': 'http://example.org/boa', + } dist = Distribution(attrs) meta = self.format_metadata(dist) self.assertIn('Metadata-Version: 1.1', meta) def test_long_description(self): - long_desc = textwrap.dedent("""\ + long_desc = textwrap.dedent( + """\ example:: We start here and continue here - and end here.""") - attrs = {"name": "package", - "version": "1.0", - "long_description": long_desc} + and end here.""" + ) + attrs = {"name": "package", "version": "1.0", "long_description": long_desc} dist = Distribution(attrs) meta = self.format_metadata(dist) @@ -470,8 +499,9 @@ class MetadataTestCase(support.TempdirManager, support.EnvironGuard, # home drive should be found os.environ['USERPROFILE'] = temp_dir files = dist.find_config_files() - self.assertIn(user_filename, files, - '%r not found in %r' % (user_filename, files)) + self.assertIn( + user_filename, files, '%r not found in %r' % (user_filename, files) + ) finally: os.remove(user_filename) @@ -491,19 +521,19 @@ class MetadataTestCase(support.TempdirManager, support.EnvironGuard, with captured_stdout() as s: dist.parse_command_line() - output = [line for line in s.getvalue().split('\n') - if line.strip() != ''] + output = [line for line in s.getvalue().split('\n') if line.strip() 
!= ''] self.assertTrue(output) - def test_read_metadata(self): - attrs = {"name": "package", - "version": "1.0", - "long_description": "desc", - "description": "xxx", - "download_url": "http://example.com", - "keywords": ['one', 'two'], - "requires": ['foo']} + attrs = { + "name": "package", + "version": "1.0", + "long_description": "desc", + "description": "xxx", + "download_url": "http://example.com", + "keywords": ['one', 'two'], + "requires": ['foo'], + } dist = Distribution(attrs) metadata = dist.metadata @@ -523,11 +553,13 @@ class MetadataTestCase(support.TempdirManager, support.EnvironGuard, self.assertEqual(metadata.obsoletes, None) self.assertEqual(metadata.requires, ['foo']) + def test_suite(): suite = unittest.TestSuite() suite.addTest(unittest.TestLoader().loadTestsFromTestCase(DistributionTestCase)) suite.addTest(unittest.TestLoader().loadTestsFromTestCase(MetadataTestCase)) return suite + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_extension.py b/setuptools/_distutils/tests/test_extension.py index 78a55da..77fe3f8 100644 --- a/setuptools/_distutils/tests/test_extension.py +++ b/setuptools/_distutils/tests/test_extension.py @@ -8,8 +8,8 @@ from distutils.extension import read_setup_file, Extension from .py38compat import check_warnings -class ExtensionTestCase(unittest.TestCase): +class ExtensionTestCase(unittest.TestCase): def test_read_setup_file(self): # trying to read a Setup file # (sample extracted from the PyGame project) @@ -21,14 +21,42 @@ class ExtensionTestCase(unittest.TestCase): # here are the extensions read_setup_file should have created # out of the file - wanted = ['_arraysurfarray', '_camera', '_numericsndarray', - '_numericsurfarray', 'base', 'bufferproxy', 'cdrom', - 'color', 'constants', 'display', 'draw', 'event', - 'fastevent', 'font', 'gfxdraw', 'image', 'imageext', - 'joystick', 'key', 'mask', 'mixer', 'mixer_music', - 'mouse', 'movie', 'overlay', 'pixelarray', 'pypm', - 'rect', 'rwobject', 'scrap', 'surface', 'surflock', - 'time', 'transform'] + wanted = [ + '_arraysurfarray', + '_camera', + '_numericsndarray', + '_numericsurfarray', + 'base', + 'bufferproxy', + 'cdrom', + 'color', + 'constants', + 'display', + 'draw', + 'event', + 'fastevent', + 'font', + 'gfxdraw', + 'image', + 'imageext', + 'joystick', + 'key', + 'mask', + 'mixer', + 'mixer_music', + 'mouse', + 'movie', + 'overlay', + 'pixelarray', + 'pypm', + 'rect', + 'rwobject', + 'scrap', + 'surface', + 'surflock', + 'time', + 'transform', + ] self.assertEqual(names, wanted) @@ -46,10 +74,20 @@ class ExtensionTestCase(unittest.TestCase): self.assertEqual(ext.sources, ['file1', 'file2']) # others arguments have defaults - for attr in ('include_dirs', 'define_macros', 'undef_macros', - 'library_dirs', 'libraries', 'runtime_library_dirs', - 'extra_objects', 'extra_compile_args', 'extra_link_args', - 'export_symbols', 'swig_opts', 'depends'): + for attr in ( + 'include_dirs', + 'define_macros', + 'undef_macros', + 'library_dirs', + 'libraries', + 'runtime_library_dirs', + 'extra_objects', + 'extra_compile_args', + 'extra_link_args', + 'export_symbols', + 'swig_opts', + 'depends', + ): self.assertEqual(getattr(ext, attr), []) self.assertEqual(ext.language, None) @@ -61,11 +99,14 @@ class ExtensionTestCase(unittest.TestCase): ext = Extension('name', ['file1', 'file2'], chic=True) self.assertEqual(len(w.warnings), 1) - self.assertEqual(str(w.warnings[0].message), - "Unknown Extension options: 'chic'") + self.assertEqual( + 
str(w.warnings[0].message), "Unknown Extension options: 'chic'" + ) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(ExtensionTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_file_util.py b/setuptools/_distutils/tests/test_file_util.py index 81b90d6..22898b9 100644 --- a/setuptools/_distutils/tests/test_file_util.py +++ b/setuptools/_distutils/tests/test_file_util.py @@ -13,7 +13,6 @@ from .py38compat import unlink class FileUtilTestCase(support.TempdirManager, unittest.TestCase): - def _log(self, msg, *args): if len(args) > 0: self._logs.append(msg % args) @@ -64,17 +63,18 @@ class FileUtilTestCase(support.TempdirManager, unittest.TestCase): def test_move_file_exception_unpacking_rename(self): # see issue 22182 - with patch("os.rename", side_effect=OSError("wrong", 1)), \ - self.assertRaises(DistutilsFileError): + with patch("os.rename", side_effect=OSError("wrong", 1)), self.assertRaises( + DistutilsFileError + ): with open(self.source, 'w') as fobj: fobj.write('spam eggs') move_file(self.source, self.target, verbose=0) def test_move_file_exception_unpacking_unlink(self): # see issue 22182 - with patch("os.rename", side_effect=OSError(errno.EXDEV, "wrong")), \ - patch("os.unlink", side_effect=OSError("wrong", 1)), \ - self.assertRaises(DistutilsFileError): + with patch("os.rename", side_effect=OSError(errno.EXDEV, "wrong")), patch( + "os.unlink", side_effect=OSError("wrong", 1) + ), self.assertRaises(DistutilsFileError): with open(self.source, 'w') as fobj: fobj.write('spam eggs') move_file(self.source, self.target, verbose=0) @@ -120,5 +120,6 @@ class FileUtilTestCase(support.TempdirManager, unittest.TestCase): def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(FileUtilTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_filelist.py b/setuptools/_distutils/tests/test_filelist.py index a90edcf..71718a8 100644 --- a/setuptools/_distutils/tests/test_filelist.py +++ b/setuptools/_distutils/tests/test_filelist.py @@ -11,7 +11,6 @@ from distutils import filelist from test.support import captured_stdout, run_unittest from distutils.tests import support -from .py35compat import adapt_glob from . 
import py38compat as os_helper @@ -36,9 +35,7 @@ def make_local_path(s): return s.replace('/', os.sep) -class FileListTestCase(support.LoggingSilencer, - unittest.TestCase): - +class FileListTestCase(support.LoggingSilencer, unittest.TestCase): def assertNoWarnings(self): self.assertEqual(self.get_logs(WARN), []) self.clear_logs() @@ -61,47 +58,53 @@ class FileListTestCase(support.LoggingSilencer, (r'foo\\*', r'(?s:foo\\\\[^%(sep)s]*)\Z'), (r'foo\\\*', r'(?s:foo\\\\\\[^%(sep)s]*)\Z'), ('foo????', r'(?s:foo[^%(sep)s][^%(sep)s][^%(sep)s][^%(sep)s])\Z'), - (r'foo\\??', r'(?s:foo\\\\[^%(sep)s][^%(sep)s])\Z')): + (r'foo\\??', r'(?s:foo\\\\[^%(sep)s][^%(sep)s])\Z'), + ): regex = regex % {'sep': sep} - self.assertEqual(glob_to_re(glob), adapt_glob(regex)) + self.assertEqual(glob_to_re(glob), regex) def test_process_template_line(self): # testing all MANIFEST.in template patterns file_list = FileList() - l = make_local_path + mlp = make_local_path # simulated file list - file_list.allfiles = ['foo.tmp', 'ok', 'xo', 'four.txt', - 'buildout.cfg', - # filelist does not filter out VCS directories, - # it's sdist that does - l('.hg/last-message.txt'), - l('global/one.txt'), - l('global/two.txt'), - l('global/files.x'), - l('global/here.tmp'), - l('f/o/f.oo'), - l('dir/graft-one'), - l('dir/dir2/graft2'), - l('dir3/ok'), - l('dir3/sub/ok.txt'), - ] + file_list.allfiles = [ + 'foo.tmp', + 'ok', + 'xo', + 'four.txt', + 'buildout.cfg', + # filelist does not filter out VCS directories, + # it's sdist that does + mlp('.hg/last-message.txt'), + mlp('global/one.txt'), + mlp('global/two.txt'), + mlp('global/files.x'), + mlp('global/here.tmp'), + mlp('f/o/f.oo'), + mlp('dir/graft-one'), + mlp('dir/dir2/graft2'), + mlp('dir3/ok'), + mlp('dir3/sub/ok.txt'), + ] for line in MANIFEST_IN.split('\n'): if line.strip() == '': continue file_list.process_template_line(line) - wanted = ['ok', - 'buildout.cfg', - 'four.txt', - l('.hg/last-message.txt'), - l('global/one.txt'), - l('global/two.txt'), - l('f/o/f.oo'), - l('dir/graft-one'), - l('dir/dir2/graft2'), - ] + wanted = [ + 'ok', + 'buildout.cfg', + 'four.txt', + mlp('.hg/last-message.txt'), + mlp('global/one.txt'), + mlp('global/two.txt'), + mlp('f/o/f.oo'), + mlp('dir/graft-one'), + mlp('dir/dir2/graft2'), + ] self.assertEqual(file_list.files, wanted) @@ -135,24 +138,23 @@ class FileListTestCase(support.LoggingSilencer, def test_translate_pattern(self): # not regex - self.assertTrue(hasattr( - translate_pattern('a', anchor=True, is_regex=False), - 'search')) + self.assertTrue( + hasattr(translate_pattern('a', anchor=True, is_regex=False), 'search') + ) # is a regex regex = re.compile('a') - self.assertEqual( - translate_pattern(regex, anchor=True, is_regex=True), - regex) + self.assertEqual(translate_pattern(regex, anchor=True, is_regex=True), regex) # plain string flagged as regex - self.assertTrue(hasattr( - translate_pattern('a', anchor=True, is_regex=True), - 'search')) + self.assertTrue( + hasattr(translate_pattern('a', anchor=True, is_regex=True), 'search') + ) # glob support - self.assertTrue(translate_pattern( - '*.py', anchor=True, is_regex=False).search('filelist.py')) + self.assertTrue( + translate_pattern('*.py', anchor=True, is_regex=False).search('filelist.py') + ) def test_exclude_pattern(self): # return False if no match @@ -189,18 +191,27 @@ class FileListTestCase(support.LoggingSilencer, self.assertEqual(file_list.allfiles, ['a.py', 'b.txt']) def test_process_template(self): - l = make_local_path + mlp = make_local_path # invalid lines file_list = 
FileList() - for action in ('include', 'exclude', 'global-include', - 'global-exclude', 'recursive-include', - 'recursive-exclude', 'graft', 'prune', 'blarg'): - self.assertRaises(DistutilsTemplateError, - file_list.process_template_line, action) + for action in ( + 'include', + 'exclude', + 'global-include', + 'global-exclude', + 'recursive-include', + 'recursive-exclude', + 'graft', + 'prune', + 'blarg', + ): + self.assertRaises( + DistutilsTemplateError, file_list.process_template_line, action + ) # include file_list = FileList() - file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')]) + file_list.set_allfiles(['a.py', 'b.txt', mlp('d/c.py')]) file_list.process_template_line('include *.py') self.assertEqual(file_list.files, ['a.py']) @@ -212,31 +223,31 @@ class FileListTestCase(support.LoggingSilencer, # exclude file_list = FileList() - file_list.files = ['a.py', 'b.txt', l('d/c.py')] + file_list.files = ['a.py', 'b.txt', mlp('d/c.py')] file_list.process_template_line('exclude *.py') - self.assertEqual(file_list.files, ['b.txt', l('d/c.py')]) + self.assertEqual(file_list.files, ['b.txt', mlp('d/c.py')]) self.assertNoWarnings() file_list.process_template_line('exclude *.rb') - self.assertEqual(file_list.files, ['b.txt', l('d/c.py')]) + self.assertEqual(file_list.files, ['b.txt', mlp('d/c.py')]) self.assertWarnings() # global-include file_list = FileList() - file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')]) + file_list.set_allfiles(['a.py', 'b.txt', mlp('d/c.py')]) file_list.process_template_line('global-include *.py') - self.assertEqual(file_list.files, ['a.py', l('d/c.py')]) + self.assertEqual(file_list.files, ['a.py', mlp('d/c.py')]) self.assertNoWarnings() file_list.process_template_line('global-include *.rb') - self.assertEqual(file_list.files, ['a.py', l('d/c.py')]) + self.assertEqual(file_list.files, ['a.py', mlp('d/c.py')]) self.assertWarnings() # global-exclude file_list = FileList() - file_list.files = ['a.py', 'b.txt', l('d/c.py')] + file_list.files = ['a.py', 'b.txt', mlp('d/c.py')] file_list.process_template_line('global-exclude *.py') self.assertEqual(file_list.files, ['b.txt']) @@ -248,52 +259,50 @@ class FileListTestCase(support.LoggingSilencer, # recursive-include file_list = FileList() - file_list.set_allfiles(['a.py', l('d/b.py'), l('d/c.txt'), - l('d/d/e.py')]) + file_list.set_allfiles(['a.py', mlp('d/b.py'), mlp('d/c.txt'), mlp('d/d/e.py')]) file_list.process_template_line('recursive-include d *.py') - self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) + self.assertEqual(file_list.files, [mlp('d/b.py'), mlp('d/d/e.py')]) self.assertNoWarnings() file_list.process_template_line('recursive-include e *.py') - self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) + self.assertEqual(file_list.files, [mlp('d/b.py'), mlp('d/d/e.py')]) self.assertWarnings() # recursive-exclude file_list = FileList() - file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')] + file_list.files = ['a.py', mlp('d/b.py'), mlp('d/c.txt'), mlp('d/d/e.py')] file_list.process_template_line('recursive-exclude d *.py') - self.assertEqual(file_list.files, ['a.py', l('d/c.txt')]) + self.assertEqual(file_list.files, ['a.py', mlp('d/c.txt')]) self.assertNoWarnings() file_list.process_template_line('recursive-exclude e *.py') - self.assertEqual(file_list.files, ['a.py', l('d/c.txt')]) + self.assertEqual(file_list.files, ['a.py', mlp('d/c.txt')]) self.assertWarnings() # graft file_list = FileList() - file_list.set_allfiles(['a.py', l('d/b.py'), l('d/d/e.py'), - 
l('f/f.py')]) + file_list.set_allfiles(['a.py', mlp('d/b.py'), mlp('d/d/e.py'), mlp('f/f.py')]) file_list.process_template_line('graft d') - self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) + self.assertEqual(file_list.files, [mlp('d/b.py'), mlp('d/d/e.py')]) self.assertNoWarnings() file_list.process_template_line('graft e') - self.assertEqual(file_list.files, [l('d/b.py'), l('d/d/e.py')]) + self.assertEqual(file_list.files, [mlp('d/b.py'), mlp('d/d/e.py')]) self.assertWarnings() # prune file_list = FileList() - file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')] + file_list.files = ['a.py', mlp('d/b.py'), mlp('d/d/e.py'), mlp('f/f.py')] file_list.process_template_line('prune d') - self.assertEqual(file_list.files, ['a.py', l('f/f.py')]) + self.assertEqual(file_list.files, ['a.py', mlp('f/f.py')]) self.assertNoWarnings() file_list.process_template_line('prune e') - self.assertEqual(file_list.files, ['a.py', l('f/f.py')]) + self.assertEqual(file_list.files, ['a.py', mlp('f/f.py')]) self.assertWarnings() @@ -343,10 +352,12 @@ class FindAllTestCase(unittest.TestCase): def test_suite(): - return unittest.TestSuite([ - unittest.TestLoader().loadTestsFromTestCase(FileListTestCase), - unittest.TestLoader().loadTestsFromTestCase(FindAllTestCase), - ]) + return unittest.TestSuite( + [ + unittest.TestLoader().loadTestsFromTestCase(FileListTestCase), + unittest.TestLoader().loadTestsFromTestCase(FindAllTestCase), + ] + ) if __name__ == "__main__": diff --git a/setuptools/_distutils/tests/test_install.py b/setuptools/_distutils/tests/test_install.py index 3aef9e4..5f0a64d 100644 --- a/setuptools/_distutils/tests/test_install.py +++ b/setuptools/_distutils/tests/test_install.py @@ -19,16 +19,23 @@ from distutils.extension import Extension from distutils.tests import support from test import support as test_support +import pytest + def _make_ext_name(modname): return modname + sysconfig.get_config_var('EXT_SUFFIX') -class InstallTestCase(support.TempdirManager, - support.EnvironGuard, - support.LoggingSilencer, - unittest.TestCase): - +class InstallTestCase( + support.TempdirManager, + support.EnvironGuard, + support.LoggingSilencer, + unittest.TestCase, +): + @pytest.mark.xfail( + 'platform.system() == "Windows" and sys.version_info > (3, 11)', + reason="pypa/distutils#148", + ) def test_home_installation_scheme(self): # This ensure two things: # - that --home generates the desired set of directory names @@ -42,7 +49,7 @@ class InstallTestCase(support.TempdirManager, dist.command_obj["build"] = support.DummyCommand( build_base=builddir, build_lib=os.path.join(builddir, "lib"), - ) + ) cmd = install(dist) cmd.home = destination @@ -63,8 +70,10 @@ class InstallTestCase(support.TempdirManager, platlibdir = os.path.join(destination, _platlibdir, impl_name) check_path(cmd.install_platlib, platlibdir) check_path(cmd.install_purelib, libdir) - check_path(cmd.install_headers, - os.path.join(destination, "include", impl_name, "foopkg")) + check_path( + cmd.install_headers, + os.path.join(destination, "include", impl_name, "foopkg"), + ) check_path(cmd.install_scripts, os.path.join(destination, "bin")) check_path(cmd.install_data, destination) @@ -85,6 +94,7 @@ class InstallTestCase(support.TempdirManager, if path.startswith('~'): return os.path.normpath(self.tmpdir + path[1:]) return path + self.old_expand = os.path.expanduser os.path.expanduser = _expanduser @@ -104,8 +114,7 @@ class InstallTestCase(support.TempdirManager, cmd = install(dist) # making sure the user option is 
there - options = [name for name, short, lable in - cmd.user_options] + options = [name for name, short, lable in cmd.user_options] self.assertIn('user', options) # setting a value @@ -128,13 +137,16 @@ class InstallTestCase(support.TempdirManager, actual_headers = os.path.relpath(cmd.install_headers, self.user_base) if os.name == 'nt': site_path = os.path.relpath( - os.path.dirname(self.old_user_site), self.old_user_base) + os.path.dirname(self.old_user_site), self.old_user_base + ) include = os.path.join(site_path, 'Include') else: include = sysconfig.get_python_inc(0, '') expect_headers = os.path.join(include, 'xx') - self.assertEqual(os.path.normcase(actual_headers), os.path.normcase(expect_headers)) + self.assertEqual( + os.path.normcase(actual_headers), os.path.normcase(expect_headers) + ) def test_handle_extra_path(self): dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'}) @@ -187,8 +199,7 @@ class InstallTestCase(support.TempdirManager, def test_record(self): install_dir = self.mkdtemp() - project_dir, dist = self.create_dist(py_modules=['hello'], - scripts=['sayhi']) + project_dir, dist = self.create_dist(py_modules=['hello'], scripts=['sayhi']) os.chdir(project_dir) self.write_file('hello.py', "def main(): print('o hai')") self.write_file('sayhi', 'from hello import main; main()') @@ -207,9 +218,12 @@ class InstallTestCase(support.TempdirManager, f.close() found = [os.path.basename(line) for line in content.splitlines()] - expected = ['hello.py', 'hello.%s.pyc' % sys.implementation.cache_tag, - 'sayhi', - 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]] + expected = [ + 'hello.py', + 'hello.%s.pyc' % sys.implementation.cache_tag, + 'sayhi', + 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2], + ] self.assertEqual(found, expected) def test_record_extensions(self): @@ -217,8 +231,9 @@ class InstallTestCase(support.TempdirManager, if cmd is not None: self.skipTest('The %r command is not found' % cmd) install_dir = self.mkdtemp() - project_dir, dist = self.create_dist(ext_modules=[ - Extension('xx', ['xxmodule.c'])]) + project_dir, dist = self.create_dist( + ext_modules=[Extension('xx', ['xxmodule.c'])] + ) os.chdir(project_dir) support.copy_xxmodule_c(project_dir) @@ -241,8 +256,10 @@ class InstallTestCase(support.TempdirManager, f.close() found = [os.path.basename(line) for line in content.splitlines()] - expected = [_make_ext_name('xx'), - 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2]] + expected = [ + _make_ext_name('xx'), + 'UNKNOWN-0.0.0-py%s.%s.egg-info' % sys.version_info[:2], + ] self.assertEqual(found, expected) def test_debug_mode(self): @@ -260,5 +277,6 @@ class InstallTestCase(support.TempdirManager, def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(InstallTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_install_data.py b/setuptools/_distutils/tests/test_install_data.py index 6191d2f..a08168b 100644 --- a/setuptools/_distutils/tests/test_install_data.py +++ b/setuptools/_distutils/tests/test_install_data.py @@ -6,11 +6,13 @@ from distutils.command.install_data import install_data from distutils.tests import support from test.support import run_unittest -class InstallDataTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): +class InstallDataTestCase( + support.TempdirManager, + support.LoggingSilencer, + support.EnvironGuard, + unittest.TestCase, +): def test_simple_run(self): pkg_dir, dist = 
self.create_dist() cmd = install_data(dist) @@ -57,9 +59,7 @@ class InstallDataTestCase(support.TempdirManager, inst4 = os.path.join(pkg_dir, 'inst4') three = os.path.join(cmd.install_dir, 'three') self.write_file(three, 'xx') - cmd.data_files = [one, (inst2, [two]), - ('inst3', [three]), - (inst4, [])] + cmd.data_files = [one, (inst2, [two]), ('inst3', [three]), (inst4, [])] cmd.ensure_finalized() cmd.run() @@ -68,8 +68,10 @@ class InstallDataTestCase(support.TempdirManager, self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) self.assertTrue(os.path.exists(os.path.join(inst, rone))) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(InstallDataTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_install_headers.py b/setuptools/_distutils/tests/test_install_headers.py index 1aa4d09..db4f4db 100644 --- a/setuptools/_distutils/tests/test_install_headers.py +++ b/setuptools/_distutils/tests/test_install_headers.py @@ -6,11 +6,13 @@ from distutils.command.install_headers import install_headers from distutils.tests import support from test.support import run_unittest -class InstallHeadersTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): +class InstallHeadersTestCase( + support.TempdirManager, + support.LoggingSilencer, + support.EnvironGuard, + unittest.TestCase, +): def test_simple_run(self): # we have two headers header_list = self.mkdtemp() @@ -32,8 +34,10 @@ class InstallHeadersTestCase(support.TempdirManager, # let's check the results self.assertEqual(len(cmd.get_outputs()), 2) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(InstallHeadersTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_install_lib.py b/setuptools/_distutils/tests/test_install_lib.py index 652653f..1ef233a 100644 --- a/setuptools/_distutils/tests/test_install_lib.py +++ b/setuptools/_distutils/tests/test_install_lib.py @@ -11,11 +11,12 @@ from distutils.errors import DistutilsOptionError from test.support import run_unittest -class InstallLibTestCase(support.TempdirManager, - support.LoggingSilencer, - support.EnvironGuard, - unittest.TestCase): - +class InstallLibTestCase( + support.TempdirManager, + support.LoggingSilencer, + support.EnvironGuard, + unittest.TestCase, +): def test_finalize_options(self): dist = self.create_dist()[1] cmd = install_lib(dist) @@ -45,8 +46,9 @@ class InstallLibTestCase(support.TempdirManager, self.write_file(f, '# python file') cmd.byte_compile([f]) pyc_file = importlib.util.cache_from_source('foo.py', optimization='') - pyc_opt_file = importlib.util.cache_from_source('foo.py', - optimization=cmd.optimize) + pyc_opt_file = importlib.util.cache_from_source( + 'foo.py', optimization=cmd.optimize + ) self.assertTrue(os.path.exists(pyc_file)) self.assertTrue(os.path.exists(pyc_opt_file)) @@ -104,12 +106,12 @@ class InstallLibTestCase(support.TempdirManager, finally: sys.dont_write_bytecode = old_dont_write_bytecode - self.assertIn('byte-compiling is disabled', - self.logs[0][1] % self.logs[0][2]) + self.assertIn('byte-compiling is disabled', self.logs[0][1] % self.logs[0][2]) def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(InstallLibTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_install_scripts.py b/setuptools/_distutils/tests/test_install_scripts.py index 648db3b..bac7880 100644 --- 
a/setuptools/_distutils/tests/test_install_scripts.py +++ b/setuptools/_distutils/tests/test_install_scripts.py @@ -10,19 +10,17 @@ from distutils.tests import support from test.support import run_unittest -class InstallScriptsTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - +class InstallScriptsTestCase( + support.TempdirManager, support.LoggingSilencer, unittest.TestCase +): def test_default_settings(self): dist = Distribution() - dist.command_obj["build"] = support.DummyCommand( - build_scripts="/foo/bar") + dist.command_obj["build"] = support.DummyCommand(build_scripts="/foo/bar") dist.command_obj["install"] = support.DummyCommand( install_scripts="/splat/funk", force=1, skip_build=1, - ) + ) cmd = install_scripts(dist) self.assertFalse(cmd.force) self.assertFalse(cmd.skip_build) @@ -48,15 +46,21 @@ class InstallScriptsTestCase(support.TempdirManager, finally: f.close() - write_script("script1.py", ("#! /usr/bin/env python2.3\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - write_script("script2.py", ("#!/usr/bin/python\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - write_script("shell.sh", ("#!/bin/sh\n" - "# bogus shell script w/ sh-bang\n" - "exit 0\n")) + write_script( + "script1.py", + ( + "#! /usr/bin/env python2.3\n" + "# bogus script w/ Python sh-bang\n" + "pass\n" + ), + ) + write_script( + "script2.py", + ("#!/usr/bin/python\n" "# bogus script w/ Python sh-bang\n" "pass\n"), + ) + write_script( + "shell.sh", ("#!/bin/sh\n" "# bogus shell script w/ sh-bang\n" "exit 0\n") + ) target = self.mkdtemp() dist = Distribution() @@ -65,7 +69,7 @@ class InstallScriptsTestCase(support.TempdirManager, install_scripts=target, force=1, skip_build=1, - ) + ) cmd = install_scripts(dist) cmd.finalize_options() cmd.run() @@ -78,5 +82,6 @@ class InstallScriptsTestCase(support.TempdirManager, def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(InstallScriptsTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_log.py b/setuptools/_distutils/tests/test_log.py index ec2ae02..33f7f96 100644 --- a/setuptools/_distutils/tests/test_log.py +++ b/setuptools/_distutils/tests/test_log.py @@ -7,40 +7,55 @@ from test.support import swap_attr, run_unittest from distutils import log + class TestLog(unittest.TestCase): def test_non_ascii(self): # Issues #8663, #34421: test that non-encodable text is escaped with # backslashreplace error handler and encodable non-ASCII text is # output as is. 
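# --- Editor's note: illustrative sketch, not part of the upstream patch ---
# The comment above describes the behaviour under test: when distutils.log
# writes to a stream whose codec (cp437 in the test) cannot encode a
# character, the 'backslashreplace' error handler escapes it, while
# encodable non-ASCII characters pass through unchanged.  A minimal,
# self-contained demonstration using a plain io.TextIOWrapper:
import io

out = io.TextIOWrapper(io.BytesIO(), encoding='cp437', errors='backslashreplace')
out.write('Dεbug\tMėssãge')      # 'ε' exists in cp437; 'ė' and 'ã' do not
out.seek(0)                      # flushes pending text and rewinds the buffer
assert out.read() == 'Dεbug\tM\\u0117ss\\xe3ge'
# ---------------------------------------------------------------------------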
- for errors in ('strict', 'backslashreplace', 'surrogateescape', - 'replace', 'ignore'): + for errors in ( + 'strict', + 'backslashreplace', + 'surrogateescape', + 'replace', + 'ignore', + ): with self.subTest(errors=errors): - stdout = io.TextIOWrapper(io.BytesIO(), - encoding='cp437', errors=errors) - stderr = io.TextIOWrapper(io.BytesIO(), - encoding='cp437', errors=errors) + stdout = io.TextIOWrapper(io.BytesIO(), encoding='cp437', errors=errors) + stderr = io.TextIOWrapper(io.BytesIO(), encoding='cp437', errors=errors) old_threshold = log.set_threshold(log.DEBUG) try: - with swap_attr(sys, 'stdout', stdout), \ - swap_attr(sys, 'stderr', stderr): + with swap_attr(sys, 'stdout', stdout), swap_attr( + sys, 'stderr', stderr + ): log.debug('Dεbug\tMėssãge') log.fatal('Fαtal\tÈrrōr') finally: log.set_threshold(old_threshold) stdout.seek(0) - self.assertEqual(stdout.read().rstrip(), - 'Dεbug\tM?ss?ge' if errors == 'replace' else - 'Dεbug\tMssge' if errors == 'ignore' else - 'Dεbug\tM\\u0117ss\\xe3ge') + self.assertEqual( + stdout.read().rstrip(), + 'Dεbug\tM?ss?ge' + if errors == 'replace' + else 'Dεbug\tMssge' + if errors == 'ignore' + else 'Dεbug\tM\\u0117ss\\xe3ge', + ) stderr.seek(0) - self.assertEqual(stderr.read().rstrip(), - 'Fαtal\t?rr?r' if errors == 'replace' else - 'Fαtal\trrr' if errors == 'ignore' else - 'Fαtal\t\\xc8rr\\u014dr') + self.assertEqual( + stderr.read().rstrip(), + 'Fαtal\t?rr?r' + if errors == 'replace' + else 'Fαtal\trrr' + if errors == 'ignore' + else 'Fαtal\t\\xc8rr\\u014dr', + ) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(TestLog) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_msvc9compiler.py b/setuptools/_distutils/tests/test_msvc9compiler.py index 6235405..ec4781a 100644 --- a/setuptools/_distutils/tests/test_msvc9compiler.py +++ b/setuptools/_distutils/tests/test_msvc9compiler.py @@ -90,38 +90,42 @@ _CLEANED_MANIFEST = """\ """ -if sys.platform=="win32": +if sys.platform == "win32": from distutils.msvccompiler import get_build_version - if get_build_version()>=8.0: + + if get_build_version() >= 8.0: SKIP_MESSAGE = None else: SKIP_MESSAGE = "These tests are only for MSVC8.0 or above" else: SKIP_MESSAGE = "These tests are only for win32" -@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) -class msvc9compilerTestCase(support.TempdirManager, - unittest.TestCase): +@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) +class msvc9compilerTestCase(support.TempdirManager, unittest.TestCase): def test_no_compiler(self): # makes sure query_vcvarsall raises # a DistutilsPlatformError if the compiler # is not found from distutils.msvc9compiler import query_vcvarsall + def _find_vcvarsall(version): return None from distutils import msvc9compiler + old_find_vcvarsall = msvc9compiler.find_vcvarsall msvc9compiler.find_vcvarsall = _find_vcvarsall try: - self.assertRaises(DistutilsPlatformError, query_vcvarsall, - 'wont find this version') + self.assertRaises( + DistutilsPlatformError, query_vcvarsall, 'wont find this version' + ) finally: msvc9compiler.find_vcvarsall = old_find_vcvarsall def test_reg_class(self): from distutils.msvc9compiler import Reg + self.assertRaises(KeyError, Reg.get_value, 'xxx', 'xxx') # looking for values that should exist on all @@ -131,6 +135,7 @@ class msvc9compilerTestCase(support.TempdirManager, self.assertIn(v, ('0', '1', '2')) import winreg + HKCU = winreg.HKEY_CURRENT_USER keys = Reg.read_keys(HKCU, 'xxxx') self.assertEqual(keys, None) @@ -140,6 
+145,7 @@ class msvc9compilerTestCase(support.TempdirManager, def test_remove_visual_c_ref(self): from distutils.msvc9compiler import MSVCCompiler + tempdir = self.mkdtemp() manifest = os.path.join(tempdir, 'manifest') f = open(manifest, 'w') @@ -164,6 +170,7 @@ class msvc9compilerTestCase(support.TempdirManager, def test_remove_entire_manifest(self): from distutils.msvc9compiler import MSVCCompiler + tempdir = self.mkdtemp() manifest = os.path.join(tempdir, 'manifest') f = open(manifest, 'w') @@ -180,5 +187,6 @@ class msvc9compilerTestCase(support.TempdirManager, def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(msvc9compilerTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_msvccompiler.py b/setuptools/_distutils/tests/test_msvccompiler.py index 846e5bb..21fe57f 100644 --- a/setuptools/_distutils/tests/test_msvccompiler.py +++ b/setuptools/_distutils/tests/test_msvccompiler.py @@ -9,15 +9,14 @@ from distutils.tests import support from test.support import run_unittest -SKIP_MESSAGE = (None if sys.platform == "win32" else - "These tests are only for win32") +SKIP_MESSAGE = None if sys.platform == "win32" else "These tests are only for win32" -@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) -class msvccompilerTestCase(support.TempdirManager, - unittest.TestCase): +@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) +class msvccompilerTestCase(support.TempdirManager, unittest.TestCase): def test_no_compiler(self): import distutils._msvccompiler as _msvccompiler + # makes sure query_vcvarsall raises # a DistutilsPlatformError if the compiler # is not found @@ -27,9 +26,11 @@ class msvccompilerTestCase(support.TempdirManager, old_find_vcvarsall = _msvccompiler._find_vcvarsall _msvccompiler._find_vcvarsall = _find_vcvarsall try: - self.assertRaises(DistutilsPlatformError, - _msvccompiler._get_vc_env, - 'wont find this version') + self.assertRaises( + DistutilsPlatformError, + _msvccompiler._get_vc_env, + 'wont find this version', + ) finally: _msvccompiler._find_vcvarsall = old_find_vcvarsall @@ -95,14 +96,14 @@ class TestSpawn(unittest.TestCase): Concurrent calls to spawn should have consistent results. """ import distutils._msvccompiler as _msvccompiler + compiler = _msvccompiler.MSVCCompiler() compiler._paths = "expected" inner_cmd = 'import os; assert os.environ["PATH"] == "expected"' command = [sys.executable, '-c', inner_cmd] threads = [ - CheckThread(target=compiler.spawn, args=[command]) - for n in range(100) + CheckThread(target=compiler.spawn, args=[command]) for n in range(100) ] for thread in threads: thread.start() @@ -117,6 +118,7 @@ class TestSpawn(unittest.TestCase): """ import distutils._msvccompiler as _msvccompiler from distutils import ccompiler + compiler = _msvccompiler.MSVCCompiler() compiler._paths = "expected" @@ -124,8 +126,7 @@ class TestSpawn(unittest.TestCase): "A spawn without an env argument." 
assert os.environ["PATH"] == "expected" - with unittest.mock.patch.object( - ccompiler.CCompiler, 'spawn', CCompiler_spawn): + with unittest.mock.patch.object(ccompiler.CCompiler, 'spawn', CCompiler_spawn): compiler.spawn(["n/a"]) assert os.environ.get("PATH") != "expected" @@ -134,5 +135,6 @@ class TestSpawn(unittest.TestCase): def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(msvccompilerTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_register.py b/setuptools/_distutils/tests/test_register.py index 4556768..0f91ad3 100644 --- a/setuptools/_distutils/tests/test_register.py +++ b/setuptools/_distutils/tests/test_register.py @@ -41,8 +41,10 @@ username:tarek password:password """ + class Inputs(object): """Fakes user inputs.""" + def __init__(self, *answers): self.answers = answers self.index = 0 @@ -53,8 +55,10 @@ class Inputs(object): finally: self.index += 1 + class FakeOpener(object): """Fakes a PyPI server""" + def __init__(self): self.reqs = [] @@ -71,17 +75,18 @@ class FakeOpener(object): def getheader(self, name, default=None): return { 'content-type': 'text/plain; charset=utf-8', - }.get(name.lower(), default) + }.get(name.lower(), default) class RegisterTestCase(BasePyPIRCCommandTestCase): - def setUp(self): super(RegisterTestCase, self).setUp() # patching the password prompt self._old_getpass = getpass.getpass + def _getpass(prompt): return 'password' + getpass.getpass = _getpass urllib.request._opener = None self.old_opener = urllib.request.build_opener @@ -95,9 +100,13 @@ class RegisterTestCase(BasePyPIRCCommandTestCase): def _get_cmd(self, metadata=None): if metadata is None: - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx'} + metadata = { + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + } pkg_info, dist = self.create_dist(**metadata) return register(dist) @@ -143,6 +152,7 @@ class RegisterTestCase(BasePyPIRCCommandTestCase): # if we run the command again def _no_way(prompt=''): raise AssertionError(prompt) + register_module.input = _no_way cmd.show_response = 1 @@ -220,10 +230,14 @@ class RegisterTestCase(BasePyPIRCCommandTestCase): self.assertRaises(DistutilsSetupError, cmd.run) # metadata are OK but long_description is broken - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'éxéxé', - 'name': 'xxx', 'version': 'xxx', - 'long_description': 'title\n==\n\ntext'} + metadata = { + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'éxéxé', + 'name': 'xxx', + 'version': 'xxx', + 'long_description': 'title\n==\n\ntext', + } cmd = self._get_cmd(metadata) cmd.ensure_finalized() @@ -255,11 +269,15 @@ class RegisterTestCase(BasePyPIRCCommandTestCase): del register_module.input # and finally a Unicode test (bug #12114) - metadata = {'url': 'xxx', 'author': '\u00c9ric', - 'author_email': 'xxx', 'name': 'xxx', - 'version': 'xxx', - 'description': 'Something about esszet \u00df', - 'long_description': 'More things about esszet \u00df'} + metadata = { + 'url': 'xxx', + 'author': '\u00c9ric', + 'author_email': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + 'description': 'Something about esszet \u00df', + 'long_description': 'More things about esszet \u00df', + } cmd = self._get_cmd(metadata) cmd.ensure_finalized() @@ -275,10 +293,14 @@ class RegisterTestCase(BasePyPIRCCommandTestCase): @unittest.skipUnless(docutils is not None, 'needs docutils') def 
test_register_invalid_long_description(self): description = ':funkie:`str`' # mimic Sphinx-specific markup - metadata = {'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx', - 'long_description': description} + metadata = { + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + 'name': 'xxx', + 'version': 'xxx', + 'long_description': description, + } cmd = self._get_cmd(metadata) cmd.ensure_finalized() cmd.strict = True @@ -321,5 +343,6 @@ class RegisterTestCase(BasePyPIRCCommandTestCase): def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(RegisterTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_sdist.py b/setuptools/_distutils/tests/test_sdist.py index aa04dd0..3a6aea2 100644 --- a/setuptools/_distutils/tests/test_sdist.py +++ b/setuptools/_distutils/tests/test_sdist.py @@ -13,6 +13,7 @@ from .py38compat import check_warnings try: import zlib + ZLIB_SUPPORT = True except ImportError: ZLIB_SUPPORT = False @@ -48,8 +49,8 @@ somecode%(sep)sdoc.dat somecode%(sep)sdoc.txt """ -class SDistTestCase(BasePyPIRCCommandTestCase): +class SDistTestCase(BasePyPIRCCommandTestCase): def setUp(self): # PyPIRCCommandTestCase creates a temp dir already # and put it in self.tmp_dir @@ -72,9 +73,13 @@ class SDistTestCase(BasePyPIRCCommandTestCase): def get_cmd(self, metadata=None): """Returns a cmd""" if metadata is None: - metadata = {'name': 'fake', 'version': '1.0', - 'url': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'} + metadata = { + 'name': 'fake', + 'version': '1.0', + 'url': 'xxx', + 'author': 'xxx', + 'author_email': 'xxx', + } dist = Distribution(metadata) dist.script_name = 'setup.py' dist.packages = ['somecode'] @@ -93,12 +98,10 @@ class SDistTestCase(BasePyPIRCCommandTestCase): self.write_file((self.tmp_dir, 'somecode', '.svn', 'ok.py'), 'xxx') os.mkdir(join(self.tmp_dir, 'somecode', '.hg')) - self.write_file((self.tmp_dir, 'somecode', '.hg', - 'ok'), 'xxx') + self.write_file((self.tmp_dir, 'somecode', '.hg', 'ok'), 'xxx') os.mkdir(join(self.tmp_dir, 'somecode', '.git')) - self.write_file((self.tmp_dir, 'somecode', '.git', - 'ok'), 'xxx') + self.write_file((self.tmp_dir, 'somecode', '.git', 'ok'), 'xxx') self.write_file((self.tmp_dir, 'somecode', '.nfs0001'), 'xxx') @@ -124,15 +127,19 @@ class SDistTestCase(BasePyPIRCCommandTestCase): zip_file.close() # making sure everything has been pruned correctly - expected = ['', 'PKG-INFO', 'README', 'setup.py', - 'somecode/', 'somecode/__init__.py'] + expected = [ + '', + 'PKG-INFO', + 'README', + 'setup.py', + 'somecode/', + 'somecode/__init__.py', + ] self.assertEqual(sorted(content), ['fake-1.0/' + x for x in expected]) @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - @unittest.skipIf(find_executable('tar') is None, - "The tar command is not found") - @unittest.skipIf(find_executable('gzip') is None, - "The gzip command is not found") + @unittest.skipIf(find_executable('tar') is None, "The tar command is not found") + @unittest.skipIf(find_executable('gzip') is None, "The gzip command is not found") def test_make_distribution(self): # now building a sdist dist, cmd = self.get_cmd() @@ -172,8 +179,7 @@ class SDistTestCase(BasePyPIRCCommandTestCase): # filling data_files by pointing files # in package_data - dist.package_data = {'': ['*.cfg', '*.dat'], - 'somecode': ['*.txt']} + dist.package_data = {'': ['*.cfg', '*.dat'], 'somecode': ['*.txt']} self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#') 
self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#') @@ -193,12 +199,11 @@ class SDistTestCase(BasePyPIRCCommandTestCase): self.write_file((some_dir, 'file.txt'), '#') self.write_file((some_dir, 'other_file.txt'), '#') - dist.data_files = [('data', ['data/data.dt', - 'buildout.cfg', - 'inroot.txt', - 'notexisting']), - 'some/file.txt', - 'some/other_file.txt'] + dist.data_files = [ + ('data', ['data/data.dt', 'buildout.cfg', 'inroot.txt', 'notexisting']), + 'some/file.txt', + 'some/other_file.txt', + ] # adding a script script_dir = join(self.tmp_dir, 'scripts') @@ -224,12 +229,25 @@ class SDistTestCase(BasePyPIRCCommandTestCase): zip_file.close() # making sure everything was added - expected = ['', 'PKG-INFO', 'README', 'buildout.cfg', - 'data/', 'data/data.dt', 'inroot.txt', - 'scripts/', 'scripts/script.py', 'setup.py', - 'some/', 'some/file.txt', 'some/other_file.txt', - 'somecode/', 'somecode/__init__.py', 'somecode/doc.dat', - 'somecode/doc.txt'] + expected = [ + '', + 'PKG-INFO', + 'README', + 'buildout.cfg', + 'data/', + 'data/data.dt', + 'inroot.txt', + 'scripts/', + 'scripts/script.py', + 'setup.py', + 'some/', + 'some/file.txt', + 'some/other_file.txt', + 'somecode/', + 'somecode/__init__.py', + 'somecode/doc.dat', + 'somecode/doc.txt', + ] self.assertEqual(sorted(content), ['fake-1.0/' + x for x in expected]) # checking the MANIFEST @@ -249,8 +267,9 @@ class SDistTestCase(BasePyPIRCCommandTestCase): # with the `check` subcommand cmd.ensure_finalized() cmd.run() - warnings = [msg for msg in self.get_logs(WARN) if - msg.startswith('warning: check:')] + warnings = [ + msg for msg in self.get_logs(WARN) if msg.startswith('warning: check:') + ] self.assertEqual(len(warnings), 1) # trying with a complete set of metadata @@ -259,8 +278,9 @@ class SDistTestCase(BasePyPIRCCommandTestCase): cmd.ensure_finalized() cmd.metadata_check = 0 cmd.run() - warnings = [msg for msg in self.get_logs(WARN) if - msg.startswith('warning: check:')] + warnings = [ + msg for msg in self.get_logs(WARN) if msg.startswith('warning: check:') + ] self.assertEqual(len(warnings), 0) def test_check_metadata_deprecated(self): @@ -277,8 +297,11 @@ class SDistTestCase(BasePyPIRCCommandTestCase): # the output should be a header line + one line per format num_formats = len(ARCHIVE_FORMATS.keys()) - output = [line for line in stdout.getvalue().split('\n') - if line.strip().startswith('--formats=')] + output = [ + line + for line in stdout.getvalue().split('\n') + if line.strip().startswith('--formats=') + ] self.assertEqual(len(output), num_formats) def test_finalize_options(self): @@ -341,8 +364,9 @@ class SDistTestCase(BasePyPIRCCommandTestCase): f = open(cmd.manifest) try: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] + manifest = [ + line.strip() for line in f.read().split('\n') if line.strip() != '' + ] finally: f.close() @@ -360,8 +384,9 @@ class SDistTestCase(BasePyPIRCCommandTestCase): f = open(cmd.manifest) try: - manifest2 = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] + manifest2 = [ + line.strip() for line in f.read().split('\n') if line.strip() != '' + ] finally: f.close() @@ -378,22 +403,24 @@ class SDistTestCase(BasePyPIRCCommandTestCase): f = open(cmd.manifest) try: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] + manifest = [ + line.strip() for line in f.read().split('\n') if line.strip() != '' + ] finally: f.close() - self.assertEqual(manifest[0], - '# file GENERATED by distutils, do NOT 
edit') + self.assertEqual(manifest[0], '# file GENERATED by distutils, do NOT edit') @unittest.skipUnless(ZLIB_SUPPORT, "Need zlib support to run") def test_manifest_comments(self): # make sure comments don't cause exceptions or wrong includes - contents = dedent("""\ + contents = dedent( + """\ # bad.py #bad.py good.py - """) + """ + ) dist, cmd = self.get_cmd() cmd.ensure_finalized() self.write_file((self.tmp_dir, cmd.manifest), contents) @@ -410,15 +437,18 @@ class SDistTestCase(BasePyPIRCCommandTestCase): cmd.formats = ['gztar'] cmd.ensure_finalized() self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') - self.write_file((self.tmp_dir, 'README.manual'), - 'This project maintains its MANIFEST file itself.') + self.write_file( + (self.tmp_dir, 'README.manual'), + 'This project maintains its MANIFEST file itself.', + ) cmd.run() self.assertEqual(cmd.filelist.files, ['README.manual']) f = open(cmd.manifest) try: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] + manifest = [ + line.strip() for line in f.read().split('\n') if line.strip() != '' + ] finally: f.close() @@ -430,16 +460,16 @@ class SDistTestCase(BasePyPIRCCommandTestCase): filenames = [tarinfo.name for tarinfo in archive] finally: archive.close() - self.assertEqual(sorted(filenames), ['fake-1.0', 'fake-1.0/PKG-INFO', - 'fake-1.0/README.manual']) + self.assertEqual( + sorted(filenames), + ['fake-1.0', 'fake-1.0/PKG-INFO', 'fake-1.0/README.manual'], + ) @unittest.skipUnless(ZLIB_SUPPORT, "requires zlib") @require_unix_id @require_uid_0 - @unittest.skipIf(find_executable('tar') is None, - "The tar command is not found") - @unittest.skipIf(find_executable('gzip') is None, - "The gzip command is not found") + @unittest.skipIf(find_executable('tar') is None, "The tar command is not found") + @unittest.skipIf(find_executable('gzip') is None, "The gzip command is not found") def test_make_distribution_owner_group(self): # now building a sdist dist, cmd = self.get_cmd() @@ -482,8 +512,10 @@ class SDistTestCase(BasePyPIRCCommandTestCase): finally: archive.close() + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(SDistTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_spawn.py b/setuptools/_distutils/tests/test_spawn.py index c5ed8e2..a773256 100644 --- a/setuptools/_distutils/tests/test_spawn.py +++ b/setuptools/_distutils/tests/test_spawn.py @@ -3,9 +3,8 @@ import os import stat import sys import unittest.mock -from test.support import run_unittest +from test.support import run_unittest, unix_shell -from .py35compat import unix_shell from . 
import py38compat as os_helper from distutils.spawn import find_executable @@ -13,12 +12,9 @@ from distutils.spawn import spawn from distutils.errors import DistutilsExecError from distutils.tests import support -class SpawnTestCase(support.TempdirManager, - support.LoggingSilencer, - unittest.TestCase): - @unittest.skipUnless(os.name in ('nt', 'posix'), - 'Runs only under posix or nt') +class SpawnTestCase(support.TempdirManager, support.LoggingSilencer, unittest.TestCase): + @unittest.skipUnless(os.name in ('nt', 'posix'), 'Runs only under posix or nt') def test_spawn(self): tmpdir = self.mkdtemp() @@ -74,16 +70,15 @@ class SpawnTestCase(support.TempdirManager, # test non-existent program dont_exist_program = "dontexist_" + program - rv = find_executable(dont_exist_program , path=tmp_dir) + rv = find_executable(dont_exist_program, path=tmp_dir) self.assertIsNone(rv) # PATH='': no match, except in the current directory with os_helper.EnvironmentVarGuard() as env: env['PATH'] = '' - with unittest.mock.patch('distutils.spawn.os.confstr', - return_value=tmp_dir, create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', - tmp_dir): + with unittest.mock.patch( + 'distutils.spawn.os.confstr', return_value=tmp_dir, create=True + ), unittest.mock.patch('distutils.spawn.os.defpath', tmp_dir): rv = find_executable(program) self.assertIsNone(rv) @@ -95,9 +90,9 @@ class SpawnTestCase(support.TempdirManager, # PATH=':': explicitly looks in the current directory with os_helper.EnvironmentVarGuard() as env: env['PATH'] = os.pathsep - with unittest.mock.patch('distutils.spawn.os.confstr', - return_value='', create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', ''): + with unittest.mock.patch( + 'distutils.spawn.os.confstr', return_value='', create=True + ), unittest.mock.patch('distutils.spawn.os.defpath', ''): rv = find_executable(program) self.assertIsNone(rv) @@ -111,18 +106,16 @@ class SpawnTestCase(support.TempdirManager, env.pop('PATH', None) # without confstr - with unittest.mock.patch('distutils.spawn.os.confstr', - side_effect=ValueError, - create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', - tmp_dir): + with unittest.mock.patch( + 'distutils.spawn.os.confstr', side_effect=ValueError, create=True + ), unittest.mock.patch('distutils.spawn.os.defpath', tmp_dir): rv = find_executable(program) self.assertEqual(rv, filename) # with confstr - with unittest.mock.patch('distutils.spawn.os.confstr', - return_value=tmp_dir, create=True), \ - unittest.mock.patch('distutils.spawn.os.defpath', ''): + with unittest.mock.patch( + 'distutils.spawn.os.confstr', return_value=tmp_dir, create=True + ), unittest.mock.patch('distutils.spawn.os.defpath', ''): rv = find_executable(program) self.assertEqual(rv, filename) @@ -135,5 +128,6 @@ class SpawnTestCase(support.TempdirManager, def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(SpawnTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_sysconfig.py b/setuptools/_distutils/tests/test_sysconfig.py index 1c88cc8..a033e07 100644 --- a/setuptools/_distutils/tests/test_sysconfig.py +++ b/setuptools/_distutils/tests/test_sysconfig.py @@ -17,7 +17,6 @@ from distutils.tests import support from test.support import run_unittest, swap_item from .py38compat import TESTFN -from .py38compat import check_warnings class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): @@ -41,20 +40,23 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): 
config_h = sysconfig.get_config_h_filename() self.assertTrue(os.path.isfile(config_h), config_h) - @unittest.skipIf(sys.platform == 'win32', - 'Makefile only exists on Unix like systems') - @unittest.skipIf(sys.implementation.name != 'cpython', - 'Makefile only exists in CPython') + @unittest.skipIf( + sys.platform == 'win32', 'Makefile only exists on Unix like systems' + ) + @unittest.skipIf( + sys.implementation.name != 'cpython', 'Makefile only exists in CPython' + ) def test_get_makefile_filename(self): makefile = sysconfig.get_makefile_filename() self.assertTrue(os.path.isfile(makefile), makefile) def test_get_python_lib(self): # XXX doesn't work on Linux when Python was never installed before - #self.assertTrue(os.path.isdir(lib_dir), lib_dir) + # self.assertTrue(os.path.isdir(lib_dir), lib_dir) # test for pythonxx.lib? - self.assertNotEqual(sysconfig.get_python_lib(), - sysconfig.get_python_lib(prefix=TESTFN)) + self.assertNotEqual( + sysconfig.get_python_lib(), sysconfig.get_python_lib(prefix=TESTFN) + ) def test_get_config_vars(self): cvars = sysconfig.get_config_vars() @@ -76,9 +78,7 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): self.assertTrue(os.path.exists(Python_h), Python_h) self.assertTrue(sysconfig._is_python_source_dir(srcdir)) elif os.name == 'posix': - self.assertEqual( - os.path.dirname(sysconfig.get_makefile_filename()), - srcdir) + self.assertEqual(os.path.dirname(sysconfig.get_makefile_filename()), srcdir) def test_srcdir_independent_of_cwd(self): # srcdir should be independent of the current working directory @@ -114,7 +114,6 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): 'CCSHARED': '--sc-ccshared', 'LDSHARED': 'sc_ldshared', 'SHLIB_SUFFIX': 'sc_shutil_suffix', - # On macOS, disable _osx_support.customize_compiler() 'CUSTOMIZED_OSX_COMPILER': 'True', } @@ -127,8 +126,9 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): return comp - @unittest.skipUnless(get_default_compiler() == 'unix', - 'not testing if default compiler is not unix') + @unittest.skipUnless( + get_default_compiler() == 'unix', 'not testing if default compiler is not unix' + ) def test_customize_compiler(self): # Make sure that sysconfig._config_vars is initialized sysconfig.get_config_vars() @@ -145,27 +145,25 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): os.environ['RANLIB'] = 'env_ranlib' comp = self.customize_compiler() - self.assertEqual(comp.exes['archiver'], - 'env_ar --env-arflags') - self.assertEqual(comp.exes['preprocessor'], - 'env_cpp --env-cppflags') - self.assertEqual(comp.exes['compiler'], - 'env_cc --sc-cflags --env-cflags --env-cppflags') - self.assertEqual(comp.exes['compiler_so'], - ('env_cc --sc-cflags ' - '--env-cflags ''--env-cppflags --sc-ccshared')) - self.assertEqual(comp.exes['compiler_cxx'], - 'env_cxx --env-cxx-flags') - self.assertEqual(comp.exes['linker_exe'], - 'env_cc') - self.assertEqual(comp.exes['linker_so'], - ('env_ldshared --env-ldflags --env-cflags' - ' --env-cppflags')) + self.assertEqual(comp.exes['archiver'], 'env_ar --env-arflags') + self.assertEqual(comp.exes['preprocessor'], 'env_cpp --env-cppflags') + self.assertEqual( + comp.exes['compiler'], 'env_cc --sc-cflags --env-cflags --env-cppflags' + ) + self.assertEqual( + comp.exes['compiler_so'], + ('env_cc --sc-cflags ' '--env-cflags ' '--env-cppflags --sc-ccshared'), + ) + self.assertEqual(comp.exes['compiler_cxx'], 'env_cxx --env-cxx-flags') + self.assertEqual(comp.exes['linker_exe'], 'env_cc') + self.assertEqual( + 
comp.exes['linker_so'], + ('env_ldshared --env-ldflags --env-cflags' ' --env-cppflags'), + ) self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix') if sys.platform == "darwin": - self.assertEqual(comp.exes['ranlib'], - 'env_ranlib') + self.assertEqual(comp.exes['ranlib'], 'env_ranlib') else: self.assertTrue('ranlib' not in comp.exes) @@ -181,20 +179,13 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): del os.environ['RANLIB'] comp = self.customize_compiler() - self.assertEqual(comp.exes['archiver'], - 'sc_ar --sc-arflags') - self.assertEqual(comp.exes['preprocessor'], - 'sc_cc -E') - self.assertEqual(comp.exes['compiler'], - 'sc_cc --sc-cflags') - self.assertEqual(comp.exes['compiler_so'], - 'sc_cc --sc-cflags --sc-ccshared') - self.assertEqual(comp.exes['compiler_cxx'], - 'sc_cxx') - self.assertEqual(comp.exes['linker_exe'], - 'sc_cc') - self.assertEqual(comp.exes['linker_so'], - 'sc_ldshared') + self.assertEqual(comp.exes['archiver'], 'sc_ar --sc-arflags') + self.assertEqual(comp.exes['preprocessor'], 'sc_cc -E') + self.assertEqual(comp.exes['compiler'], 'sc_cc --sc-cflags') + self.assertEqual(comp.exes['compiler_so'], 'sc_cc --sc-cflags --sc-ccshared') + self.assertEqual(comp.exes['compiler_cxx'], 'sc_cxx') + self.assertEqual(comp.exes['linker_exe'], 'sc_cc') + self.assertEqual(comp.exes['linker_so'], 'sc_ldshared') self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix') self.assertTrue('ranlib' not in comp.exes) @@ -207,8 +198,9 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): finally: fd.close() d = sysconfig.parse_makefile(self.makefile) - self.assertEqual(d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'", - 'OTHER': 'foo'}) + self.assertEqual( + d, {'CONFIG_ARGS': "'--arg1=optarg1' 'ENV=LIB'", 'OTHER': 'foo'} + ) def test_parse_makefile_literal_dollar(self): self.makefile = TESTFN @@ -219,19 +211,25 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): finally: fd.close() d = sysconfig.parse_makefile(self.makefile) - self.assertEqual(d, {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'", - 'OTHER': 'foo'}) - + self.assertEqual( + d, {'CONFIG_ARGS': r"'--arg1=optarg1' 'ENV=\$LIB'", 'OTHER': 'foo'} + ) def test_sysconfig_module(self): import sysconfig as global_sysconfig - self.assertEqual(global_sysconfig.get_config_var('CFLAGS'), - sysconfig.get_config_var('CFLAGS')) - self.assertEqual(global_sysconfig.get_config_var('LDFLAGS'), - sysconfig.get_config_var('LDFLAGS')) - @unittest.skipIf(sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'), - 'compiler flags customized') + self.assertEqual( + global_sysconfig.get_config_var('CFLAGS'), + sysconfig.get_config_var('CFLAGS'), + ) + self.assertEqual( + global_sysconfig.get_config_var('LDFLAGS'), + sysconfig.get_config_var('LDFLAGS'), + ) + + @unittest.skipIf( + sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'), 'compiler flags customized' + ) def test_sysconfig_compiler_vars(self): # On OS X, binary installers support extension module building on # various levels of the operating system with differing Xcode @@ -248,49 +246,46 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): # The longer-term solution is to only have one version of sysconfig. 
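# --- Editor's note: illustrative sketch, not part of the upstream patch ---
# The assertions that follow check that distutils' bundled sysconfig and the
# standard-library sysconfig module report the same compiler settings.  A
# rough way to compare the two by hand (assumes a regular CPython install
# where both modules are importable; the variable names are just examples):
import sysconfig as stdlib_sysconfig
from distutils import sysconfig as distutils_sysconfig

for var in ('CC', 'LDSHARED', 'CFLAGS'):
    print(var,
          stdlib_sysconfig.get_config_var(var),
          distutils_sysconfig.get_config_var(var),
          sep='\t')
# ---------------------------------------------------------------------------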
import sysconfig as global_sysconfig + if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'): self.skipTest('compiler flags customized') - self.assertEqual(global_sysconfig.get_config_var('LDSHARED'), - sysconfig.get_config_var('LDSHARED')) - self.assertEqual(global_sysconfig.get_config_var('CC'), - sysconfig.get_config_var('CC')) - - @unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None, - 'EXT_SUFFIX required for this test') + self.assertEqual( + global_sysconfig.get_config_var('LDSHARED'), + sysconfig.get_config_var('LDSHARED'), + ) + self.assertEqual( + global_sysconfig.get_config_var('CC'), sysconfig.get_config_var('CC') + ) + + @unittest.skipIf( + sysconfig.get_config_var('EXT_SUFFIX') is None, + 'EXT_SUFFIX required for this test', + ) def test_SO_deprecation(self): - self.assertWarns(DeprecationWarning, - sysconfig.get_config_var, 'SO') - - @unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None, - 'EXT_SUFFIX required for this test') - def test_SO_value(self): - with check_warnings(('', DeprecationWarning)): - self.assertEqual(sysconfig.get_config_var('SO'), - sysconfig.get_config_var('EXT_SUFFIX')) - - @unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None, - 'EXT_SUFFIX required for this test') - def test_SO_in_vars(self): - vars = sysconfig.get_config_vars() - self.assertIsNotNone(vars['SO']) - self.assertEqual(vars['SO'], vars['EXT_SUFFIX']) + self.assertWarns(DeprecationWarning, sysconfig.get_config_var, 'SO') def test_customize_compiler_before_get_config_vars(self): # Issue #21923: test that a Distribution compiler # instance can be called without an explicit call to # get_config_vars(). with open(TESTFN, 'w') as f: - f.writelines(textwrap.dedent('''\ + f.writelines( + textwrap.dedent( + '''\ from distutils.core import Distribution config = Distribution().get_command_obj('config') # try_compile may pass or it may fail if no compiler # is found but it should not raise an exception. 
rc = config.try_compile('int x;') - ''')) - p = subprocess.Popen([str(sys.executable), TESTFN], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - universal_newlines=True) + ''' + ) + ) + p = subprocess.Popen( + [str(sys.executable), TESTFN], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + ) outs, errs = p.communicate() self.assertEqual(0, p.returncode, "Subprocess failed: " + outs) @@ -304,23 +299,22 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): result = sysconfig.parse_config_h(f) self.assertTrue(isinstance(result, dict)) - @unittest.skipUnless(sys.platform == 'win32', - 'Testing windows pyd suffix') - @unittest.skipUnless(sys.implementation.name == 'cpython', - 'Need cpython for this test') + @unittest.skipUnless(sys.platform == 'win32', 'Testing windows pyd suffix') + @unittest.skipUnless( + sys.implementation.name == 'cpython', 'Need cpython for this test' + ) def test_win_ext_suffix(self): self.assertTrue(sysconfig.get_config_var("EXT_SUFFIX").endswith(".pyd")) self.assertNotEqual(sysconfig.get_config_var("EXT_SUFFIX"), ".pyd") + @unittest.skipUnless(sys.platform == 'win32', 'Testing Windows build layout') @unittest.skipUnless( - sys.platform == 'win32', - 'Testing Windows build layout') - @unittest.skipUnless( - sys.implementation.name == 'cpython', - 'Need cpython for this test') + sys.implementation.name == 'cpython', 'Need cpython for this test' + ) @unittest.skipUnless( '\\PCbuild\\'.casefold() in sys.executable.casefold(), - 'Need sys.executable to be in a source tree') + 'Need sys.executable to be in a source tree', + ) def test_win_build_venv_from_source_tree(self): """Ensure distutils.sysconfig detects venvs from source tree builds.""" env = jaraco.envs.VEnv() @@ -330,10 +324,12 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): cmd = [ env.exe(), "-c", - "import distutils.sysconfig; print(distutils.sysconfig.python_build)" + "import distutils.sysconfig; print(distutils.sysconfig.python_build)", ] distutils_path = os.path.dirname(os.path.dirname(distutils.__file__)) - out = subprocess.check_output(cmd, env={**os.environ, "PYTHONPATH": distutils_path}) + out = subprocess.check_output( + cmd, env={**os.environ, "PYTHONPATH": distutils_path} + ) assert out == "True" diff --git a/setuptools/_distutils/tests/test_text_file.py b/setuptools/_distutils/tests/test_text_file.py index ebac3d5..16de9ca 100644 --- a/setuptools/_distutils/tests/test_text_file.py +++ b/setuptools/_distutils/tests/test_text_file.py @@ -12,32 +12,35 @@ line 3 \\ continues on next line """ -class TextFileTestCase(support.TempdirManager, unittest.TestCase): +class TextFileTestCase(support.TempdirManager, unittest.TestCase): def test_class(self): # old tests moved from text_file.__main__ # so they are really called by the buildbots # result 1: no fancy options - result1 = ['# test file\n', '\n', 'line 3 \\\n', - '# intervening comment\n', - ' continues on next line\n'] + result1 = [ + '# test file\n', + '\n', + 'line 3 \\\n', + '# intervening comment\n', + ' continues on next line\n', + ] # result 2: just strip comments - result2 = ["\n", - "line 3 \\\n", - " continues on next line\n"] + result2 = ["\n", "line 3 \\\n", " continues on next line\n"] # result 3: just strip blank lines - result3 = ["# test file\n", - "line 3 \\\n", - "# intervening comment\n", - " continues on next line\n"] + result3 = [ + "# test file\n", + "line 3 \\\n", + "# intervening comment\n", + " continues on next line\n", + ] # result 4: default, strip 
comments, blank lines, # and trailing whitespace - result4 = ["line 3 \\", - " continues on next line"] + result4 = ["line 3 \\", " continues on next line"] # result 5: strip comments and blanks, plus join lines (but don't # "collapse" joined lines @@ -59,22 +62,25 @@ class TextFileTestCase(support.TempdirManager, unittest.TestCase): finally: out_file.close() - in_file = TextFile(filename, strip_comments=0, skip_blanks=0, - lstrip_ws=0, rstrip_ws=0) + in_file = TextFile( + filename, strip_comments=0, skip_blanks=0, lstrip_ws=0, rstrip_ws=0 + ) try: test_input(1, "no processing", in_file, result1) finally: in_file.close() - in_file = TextFile(filename, strip_comments=1, skip_blanks=0, - lstrip_ws=0, rstrip_ws=0) + in_file = TextFile( + filename, strip_comments=1, skip_blanks=0, lstrip_ws=0, rstrip_ws=0 + ) try: test_input(2, "strip comments", in_file, result2) finally: in_file.close() - in_file = TextFile(filename, strip_comments=0, skip_blanks=1, - lstrip_ws=0, rstrip_ws=0) + in_file = TextFile( + filename, strip_comments=0, skip_blanks=1, lstrip_ws=0, rstrip_ws=0 + ) try: test_input(3, "strip blanks", in_file, result3) finally: @@ -86,22 +92,31 @@ class TextFileTestCase(support.TempdirManager, unittest.TestCase): finally: in_file.close() - in_file = TextFile(filename, strip_comments=1, skip_blanks=1, - join_lines=1, rstrip_ws=1) + in_file = TextFile( + filename, strip_comments=1, skip_blanks=1, join_lines=1, rstrip_ws=1 + ) try: test_input(5, "join lines without collapsing", in_file, result5) finally: in_file.close() - in_file = TextFile(filename, strip_comments=1, skip_blanks=1, - join_lines=1, rstrip_ws=1, collapse_join=1) + in_file = TextFile( + filename, + strip_comments=1, + skip_blanks=1, + join_lines=1, + rstrip_ws=1, + collapse_join=1, + ) try: test_input(6, "join lines with collapsing", in_file, result6) finally: in_file.close() + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(TextFileTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_unixccompiler.py b/setuptools/_distutils/tests/test_unixccompiler.py index c8b4c14..879769f 100644 --- a/setuptools/_distutils/tests/test_unixccompiler.py +++ b/setuptools/_distutils/tests/test_unixccompiler.py @@ -14,16 +14,18 @@ from distutils.util import _clear_cached_macosx_ver from . 
import support -class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): +class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): def setUp(self): super().setUp() self._backup_platform = sys.platform self._backup_get_config_var = sysconfig.get_config_var self._backup_get_config_vars = sysconfig.get_config_vars + class CompilerWrapper(UnixCCompiler): def rpath_foo(self): return self.runtime_library_dir_option('/foo') + self.cc = CompilerWrapper() def tearDown(self): @@ -49,18 +51,18 @@ class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): # Version value of None generates two tests: as None and as empty string # Expected flag value of None means an mismatch exception is expected darwin_test_cases = [ - ((None , None ), darwin_lib_flag), - ((None , '11' ), darwin_rpath_flag), - (('10' , None ), darwin_lib_flag), - (('10.3' , None ), darwin_lib_flag), - (('10.3.1', None ), darwin_lib_flag), - (('10.5' , None ), darwin_rpath_flag), - (('10.5.1', None ), darwin_rpath_flag), - (('10.3' , '10.3' ), darwin_lib_flag), - (('10.3' , '10.5' ), darwin_rpath_flag), - (('10.5' , '10.3' ), darwin_lib_flag), - (('10.5' , '11' ), darwin_rpath_flag), - (('10.4' , '10' ), None), + ((None, None), darwin_lib_flag), + ((None, '11'), darwin_rpath_flag), + (('10', None), darwin_lib_flag), + (('10.3', None), darwin_lib_flag), + (('10.3.1', None), darwin_lib_flag), + (('10.5', None), darwin_rpath_flag), + (('10.5.1', None), darwin_rpath_flag), + (('10.3', '10.3'), darwin_lib_flag), + (('10.3', '10.5'), darwin_rpath_flag), + (('10.5', '10.3'), darwin_lib_flag), + (('10.5', '11'), darwin_rpath_flag), + (('10.4', '10'), None), ] def make_darwin_gcv(syscfg_macosx_ver): @@ -68,12 +70,15 @@ class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): if var == darwin_ver_var: return syscfg_macosx_ver return "xxx" + return gcv def do_darwin_test(syscfg_macosx_ver, env_macosx_ver, expected_flag): env = os.environ - msg = "macOS version = (sysconfig=%r, env=%r)" % \ - (syscfg_macosx_ver, env_macosx_ver) + msg = "macOS version = (sysconfig=%r, env=%r)" % ( + syscfg_macosx_ver, + env_macosx_ver, + ) # Save old_gcv = sysconfig.get_config_var @@ -91,8 +96,9 @@ class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): if expected_flag is not None: self.assertEqual(self.cc.rpath_foo(), expected_flag, msg=msg) else: - with self.assertRaisesRegex(DistutilsPlatformError, - darwin_ver_var + r' mismatch', msg=msg): + with self.assertRaisesRegex( + DistutilsPlatformError, darwin_ver_var + r' mismatch', msg=msg + ): self.cc.rpath_foo() # Restore @@ -118,18 +124,22 @@ class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): # hp-ux sys.platform = 'hp-ux' + def gcv(v): return 'xxx' + sysconfig.get_config_var = gcv self.assertEqual(self.cc.rpath_foo(), ['+s', '-L/foo']) def gcv(v): return 'gcc' + sysconfig.get_config_var = gcv self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo']) def gcv(v): return 'g++' + sysconfig.get_config_var = gcv self.assertEqual(self.cc.rpath_foo(), ['-Wl,+s', '-L/foo']) @@ -137,11 +147,13 @@ class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): # GCC GNULD sys.platform = 'bar' + def gcv(v): if v == 'CC': return 'gcc' elif v == 'GNULD': return 'yes' + sysconfig.get_config_var = gcv self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') @@ -150,47 +162,56 @@ class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): return 'gcc -pthread -B /bar' elif v == 'GNULD': return 'yes' 
+ sysconfig.get_config_var = gcv self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') # GCC non-GNULD sys.platform = 'bar' + def gcv(v): if v == 'CC': return 'gcc' elif v == 'GNULD': return 'no' + sysconfig.get_config_var = gcv self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo') # GCC GNULD with fully qualified configuration prefix # see #7617 sys.platform = 'bar' + def gcv(v): if v == 'CC': return 'x86_64-pc-linux-gnu-gcc-4.4.2' elif v == 'GNULD': return 'yes' + sysconfig.get_config_var = gcv self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') # non-GCC GNULD sys.platform = 'bar' + def gcv(v): if v == 'CC': return 'cc' elif v == 'GNULD': return 'yes' + sysconfig.get_config_var = gcv self.assertEqual(self.cc.rpath_foo(), '-Wl,--enable-new-dtags,-R/foo') # non-GCC non-GNULD sys.platform = 'bar' + def gcv(v): if v == 'CC': return 'cc' elif v == 'GNULD': return 'no' + sysconfig.get_config_var = gcv self.assertEqual(self.cc.rpath_foo(), '-Wl,-R/foo') @@ -207,6 +228,7 @@ class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): if args: return list(map(sysconfig.get_config_var, args)) return _orig() + sysconfig.get_config_var = gcv sysconfig.get_config_vars = gcvs with EnvironmentVarGuard() as env: @@ -223,6 +245,7 @@ class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): pypa/distutils#126 """ + def gcv(v): if v == 'LDSHARED': return 'gcc-4.2 -bundle -undefined dynamic_lookup ' @@ -237,10 +260,13 @@ class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): sysconfig.get_config_var = gcv sysconfig.get_config_vars = gcvs - with patch.object(self.cc, 'spawn', return_value=None) as mock_spawn, \ - patch.object(self.cc, '_need_link', return_value=True), \ - patch.object(self.cc, 'mkpath', return_value=None), \ - EnvironmentVarGuard() as env: + with patch.object( + self.cc, 'spawn', return_value=None + ) as mock_spawn, patch.object( + self.cc, '_need_link', return_value=True + ), patch.object( + self.cc, 'mkpath', return_value=None + ), EnvironmentVarGuard() as env: env['CC'] = 'ccache my_cc' env['CXX'] = 'my_cxx' del env['LDSHARED'] @@ -265,6 +291,7 @@ class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): if args: return list(map(sysconfig.get_config_var, args)) return _orig() + sysconfig.get_config_var = gcv sysconfig.get_config_vars = gcvs with EnvironmentVarGuard() as env: @@ -285,5 +312,6 @@ class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(UnixCCompilerTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_upload.py b/setuptools/_distutils/tests/test_upload.py index ce3e84a..afba2fa 100644 --- a/setuptools/_distutils/tests/test_upload.py +++ b/setuptools/_distutils/tests/test_upload.py @@ -43,8 +43,8 @@ index-servers = username:me """ -class FakeOpen(object): +class FakeOpen(object): def __init__(self, url, msg=None, code=None): self.url = url if not isinstance(url, str): @@ -57,7 +57,7 @@ class FakeOpen(object): def getheader(self, name, default=None): return { 'content-type': 'text/plain; charset=utf-8', - }.get(name.lower(), default) + }.get(name.lower(), default) def read(self): return b'xyzzy' @@ -67,7 +67,6 @@ class FakeOpen(object): class uploadTestCase(BasePyPIRCCommandTestCase): - def setUp(self): super(uploadTestCase, self).setUp() self.old_open = upload_mod.urlopen @@ -91,9 +90,12 @@ class uploadTestCase(BasePyPIRCCommandTestCase): dist = 
Distribution() cmd = upload(dist) cmd.finalize_options() - for attr, waited in (('username', 'me'), ('password', 'secret'), - ('realm', 'pypi'), - ('repository', 'https://upload.pypi.org/legacy/')): + for attr, waited in ( + ('username', 'me'), + ('password', 'secret'), + ('realm', 'pypi'), + ('repository', 'https://upload.pypi.org/legacy/'), + ): self.assertEqual(getattr(cmd, attr), waited) def test_saved_password(self): @@ -137,13 +139,12 @@ class uploadTestCase(BasePyPIRCCommandTestCase): expected_url = 'https://upload.pypi.org/legacy/' self.assertEqual(self.last_open.req.get_full_url(), expected_url) data = self.last_open.req.data - self.assertIn(b'xxx',data) + self.assertIn(b'xxx', data) self.assertIn(b'protocol_version', data) self.assertIn(b'sha256_digest', data) self.assertIn( - b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf' - b'6860', - data + b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf' b'6860', + data, ) if b'md5_digest' in data: self.assertIn(b'f561aaf6ef0bf14d4208bb46a4ccb3ad', data) @@ -152,7 +153,7 @@ class uploadTestCase(BasePyPIRCCommandTestCase): b'b6f289a27d4fe90da63c503bfe0a9b761a8f76bb86148565065f040be' b'6d1c3044cf7ded78ef800509bccb4b648e507d88dc6383d67642aadcc' b'ce443f1534330a', - data + data, ) # The PyPI response body was echoed @@ -173,8 +174,7 @@ class uploadTestCase(BasePyPIRCCommandTestCase): # other fields that ended with \r used to be modified, now are # preserved. pkg_dir, dist = self.create_dist( - dist_files=dist_files, - description='long description\r' + dist_files=dist_files, description='long description\r' ) cmd = upload(dist) cmd.show_response = 1 @@ -200,13 +200,18 @@ class uploadTestCase(BasePyPIRCCommandTestCase): pkg_dir, dist = self.create_dist(dist_files=dist_files) tests = [ (OSError('oserror'), 'oserror', OSError), - (HTTPError('url', 400, 'httperror', {}, None), - 'Upload failed (400): httperror', DistutilsError), + ( + HTTPError('url', 400, 'httperror', {}, None), + 'Upload failed (400): httperror', + DistutilsError, + ), ] for exception, expected, raised_exception in tests: with self.subTest(exception=type(exception).__name__): - with mock.patch('distutils.command.upload.urlopen', - new=mock.Mock(side_effect=exception)): + with mock.patch( + 'distutils.command.upload.urlopen', + new=mock.Mock(side_effect=exception), + ): with self.assertRaises(raised_exception): cmd = upload(dist) cmd.ensure_finalized() @@ -219,5 +224,6 @@ class uploadTestCase(BasePyPIRCCommandTestCase): def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(uploadTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_util.py b/setuptools/_distutils/tests/test_util.py index 2738388..cebd61c 100644 --- a/setuptools/_distutils/tests/test_util.py +++ b/setuptools/_distutils/tests/test_util.py @@ -8,16 +8,24 @@ from test.support import run_unittest from unittest import mock from distutils.errors import DistutilsPlatformError, DistutilsByteCompileError -from distutils.util import (get_platform, convert_path, change_root, - check_environ, split_quoted, strtobool, - rfc822_escape, byte_compile, - grok_environment_error, get_host_platform) -from distutils import util # used to patch _environ_checked +from distutils.util import ( + get_platform, + convert_path, + change_root, + check_environ, + split_quoted, + strtobool, + rfc822_escape, + byte_compile, + grok_environment_error, + get_host_platform, +) +from distutils import util # used to patch _environ_checked from 
distutils import sysconfig from distutils.tests import support -class UtilTestCase(support.EnvironGuard, unittest.TestCase): +class UtilTestCase(support.EnvironGuard, unittest.TestCase): def setUp(self): super(UtilTestCase, self).setUp() # saving the environment @@ -64,9 +72,9 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase): def test_get_host_platform(self): with unittest.mock.patch('os.name', 'nt'): - with unittest.mock.patch('sys.version', '... [... (ARM64)]'): + with unittest.mock.patch('sys.version', '... [... (ARM64)]'): self.assertEqual(get_host_platform(), 'win-arm64') - with unittest.mock.patch('sys.version', '... [... (ARM)]'): + with unittest.mock.patch('sys.version', '... [... (ARM)]'): self.assertEqual(get_host_platform(), 'win-arm32') with unittest.mock.patch('sys.version_info', (3, 9, 0, 'final', 0)): @@ -75,76 +83,86 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase): def test_get_platform(self): with unittest.mock.patch('os.name', 'nt'): with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x86'}): - self.assertEqual(get_platform(), 'win32') + self.assertEqual(get_platform(), 'win32') with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'x64'}): - self.assertEqual(get_platform(), 'win-amd64') + self.assertEqual(get_platform(), 'win-amd64') with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm'}): - self.assertEqual(get_platform(), 'win-arm32') - with unittest.mock.patch.dict('os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm64'}): - self.assertEqual(get_platform(), 'win-arm64') + self.assertEqual(get_platform(), 'win-arm32') + with unittest.mock.patch.dict( + 'os.environ', {'VSCMD_ARG_TGT_ARCH': 'arm64'} + ): + self.assertEqual(get_platform(), 'win-arm64') def test_convert_path(self): # linux/mac os.sep = '/' + def _join(path): return '/'.join(path) + os.path.join = _join - self.assertEqual(convert_path('/home/to/my/stuff'), - '/home/to/my/stuff') + self.assertEqual(convert_path('/home/to/my/stuff'), '/home/to/my/stuff') # win os.sep = '\\' + def _join(*path): return '\\'.join(path) + os.path.join = _join self.assertRaises(ValueError, convert_path, '/home/to/my/stuff') self.assertRaises(ValueError, convert_path, 'home/to/my/stuff/') - self.assertEqual(convert_path('home/to/my/stuff'), - 'home\\to\\my\\stuff') - self.assertEqual(convert_path('.'), - os.curdir) + self.assertEqual(convert_path('home/to/my/stuff'), 'home\\to\\my\\stuff') + self.assertEqual(convert_path('.'), os.curdir) def test_change_root(self): # linux/mac os.name = 'posix' + def _isabs(path): return path[0] == '/' + os.path.isabs = _isabs + def _join(*path): return '/'.join(path) + os.path.join = _join - self.assertEqual(change_root('/root', '/old/its/here'), - '/root/old/its/here') - self.assertEqual(change_root('/root', 'its/here'), - '/root/its/here') + self.assertEqual(change_root('/root', '/old/its/here'), '/root/old/its/here') + self.assertEqual(change_root('/root', 'its/here'), '/root/its/here') # windows os.name = 'nt' + def _isabs(path): return path.startswith('c:\\') + os.path.isabs = _isabs + def _splitdrive(path): if path.startswith('c:'): return ('', path.replace('c:', '')) return ('', path) + os.path.splitdrive = _splitdrive + def _join(*path): return '\\'.join(path) + os.path.join = _join - self.assertEqual(change_root('c:\\root', 'c:\\old\\its\\here'), - 'c:\\root\\old\\its\\here') - self.assertEqual(change_root('c:\\root', 'its\\here'), - 'c:\\root\\its\\here') + self.assertEqual( + change_root('c:\\root', 'c:\\old\\its\\here'), 
'c:\\root\\old\\its\\here' + ) + self.assertEqual(change_root('c:\\root', 'its\\here'), 'c:\\root\\its\\here') # BugsBunny os (it's a great os) os.name = 'BugsBunny' - self.assertRaises(DistutilsPlatformError, - change_root, 'c:\\root', 'its\\here') + self.assertRaises(DistutilsPlatformError, change_root, 'c:\\root', 'its\\here') # XXX platforms to be covered: mac @@ -165,8 +183,9 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase): import pwd # only set pw_dir field, other fields are not used - result = pwd.struct_passwd((None, None, None, None, None, - '/home/distutils', None)) + result = pwd.struct_passwd( + (None, None, None, None, None, '/home/distutils', None) + ) with mock.patch.object(pwd, 'getpwuid', return_value=result): check_environ() self.assertEqual(os.environ['HOME'], '/home/distutils') @@ -180,8 +199,10 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase): self.assertNotIn('HOME', os.environ) def test_split_quoted(self): - self.assertEqual(split_quoted('""one"" "two" \'three\' \\four'), - ['one', 'two', 'three', 'four']) + self.assertEqual( + split_quoted('""one"" "two" \'three\' \\four'), + ['one', 'two', 'three', 'four'], + ) def test_strtobool(self): yes = ('y', 'Y', 'yes', 'True', 't', 'true', 'True', 'On', 'on', '1') @@ -196,8 +217,9 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase): def test_rfc822_escape(self): header = 'I am a\npoor\nlonesome\nheader\n' res = rfc822_escape(header) - wanted = ('I am a%(8s)spoor%(8s)slonesome%(8s)s' - 'header%(8s)s') % {'8s': '\n'+8*' '} + wanted = ('I am a%(8s)spoor%(8s)slonesome%(8s)s' 'header%(8s)s') % { + '8s': '\n' + 8 * ' ' + } self.assertEqual(res, wanted) def test_dont_write_bytecode(self): @@ -220,5 +242,6 @@ class UtilTestCase(support.EnvironGuard, unittest.TestCase): def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(UtilTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_version.py b/setuptools/_distutils/tests/test_version.py index 8405aa3..cecb279 100644 --- a/setuptools/_distutils/tests/test_version.py +++ b/setuptools/_distutils/tests/test_version.py @@ -5,8 +5,8 @@ from distutils.version import LooseVersion from distutils.version import StrictVersion from test.support import run_unittest -class VersionTestCase(unittest.TestCase): +class VersionTestCase(unittest.TestCase): def setUp(self): self.ctx = distutils.version.suppress_known_deprecation() self.ctx.__enter__() @@ -24,21 +24,23 @@ class VersionTestCase(unittest.TestCase): self.assertEqual(str(version), '1.2') def test_cmp_strict(self): - versions = (('1.5.1', '1.5.2b2', -1), - ('161', '3.10a', ValueError), - ('8.02', '8.02', 0), - ('3.4j', '1996.07.12', ValueError), - ('3.2.pl0', '3.1.1.6', ValueError), - ('2g6', '11g', ValueError), - ('0.9', '2.2', -1), - ('1.2.1', '1.2', 1), - ('1.1', '1.2.2', -1), - ('1.2', '1.1', 1), - ('1.2.1', '1.2.2', -1), - ('1.2.2', '1.2', 1), - ('1.2', '1.2.2', -1), - ('0.4.0', '0.4', 0), - ('1.13++', '5.5.kw', ValueError)) + versions = ( + ('1.5.1', '1.5.2b2', -1), + ('161', '3.10a', ValueError), + ('8.02', '8.02', 0), + ('3.4j', '1996.07.12', ValueError), + ('3.2.pl0', '3.1.1.6', ValueError), + ('2g6', '11g', ValueError), + ('0.9', '2.2', -1), + ('1.2.1', '1.2', 1), + ('1.1', '1.2.2', -1), + ('1.2', '1.1', 1), + ('1.2.1', '1.2.2', -1), + ('1.2.2', '1.2', 1), + ('1.2', '1.2.2', -1), + ('0.4.0', '0.4', 0), + ('1.13++', '5.5.kw', ValueError), + ) for v1, v2, wanted in versions: try: @@ -47,49 +49,55 @@ class 
VersionTestCase(unittest.TestCase): if wanted is ValueError: continue else: - raise AssertionError(("cmp(%s, %s) " - "shouldn't raise ValueError") - % (v1, v2)) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) + raise AssertionError( + ("cmp(%s, %s) " "shouldn't raise ValueError") % (v1, v2) + ) + self.assertEqual( + res, wanted, 'cmp(%s, %s) should be %s, got %s' % (v1, v2, wanted, res) + ) res = StrictVersion(v1)._cmp(v2) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) + self.assertEqual( + res, wanted, 'cmp(%s, %s) should be %s, got %s' % (v1, v2, wanted, res) + ) res = StrictVersion(v1)._cmp(object()) - self.assertIs(res, NotImplemented, - 'cmp(%s, %s) should be NotImplemented, got %s' % - (v1, v2, res)) - + self.assertIs( + res, + NotImplemented, + 'cmp(%s, %s) should be NotImplemented, got %s' % (v1, v2, res), + ) def test_cmp(self): - versions = (('1.5.1', '1.5.2b2', -1), - ('161', '3.10a', 1), - ('8.02', '8.02', 0), - ('3.4j', '1996.07.12', -1), - ('3.2.pl0', '3.1.1.6', 1), - ('2g6', '11g', -1), - ('0.960923', '2.2beta29', -1), - ('1.13++', '5.5.kw', -1)) - + versions = ( + ('1.5.1', '1.5.2b2', -1), + ('161', '3.10a', 1), + ('8.02', '8.02', 0), + ('3.4j', '1996.07.12', -1), + ('3.2.pl0', '3.1.1.6', 1), + ('2g6', '11g', -1), + ('0.960923', '2.2beta29', -1), + ('1.13++', '5.5.kw', -1), + ) for v1, v2, wanted in versions: res = LooseVersion(v1)._cmp(LooseVersion(v2)) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) + self.assertEqual( + res, wanted, 'cmp(%s, %s) should be %s, got %s' % (v1, v2, wanted, res) + ) res = LooseVersion(v1)._cmp(v2) - self.assertEqual(res, wanted, - 'cmp(%s, %s) should be %s, got %s' % - (v1, v2, wanted, res)) + self.assertEqual( + res, wanted, 'cmp(%s, %s) should be %s, got %s' % (v1, v2, wanted, res) + ) res = LooseVersion(v1)._cmp(object()) - self.assertIs(res, NotImplemented, - 'cmp(%s, %s) should be NotImplemented, got %s' % - (v1, v2, res)) + self.assertIs( + res, + NotImplemented, + 'cmp(%s, %s) should be NotImplemented, got %s' % (v1, v2, res), + ) + def test_suite(): return unittest.TestLoader().loadTestsFromTestCase(VersionTestCase) + if __name__ == "__main__": run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/test_versionpredicate.py b/setuptools/_distutils/tests/test_versionpredicate.py index 28ae09d..ce3d0f4 100644 --- a/setuptools/_distutils/tests/test_versionpredicate.py +++ b/setuptools/_distutils/tests/test_versionpredicate.py @@ -6,8 +6,10 @@ import distutils.versionpredicate import doctest from test.support import run_unittest + def test_suite(): return doctest.DocTestSuite(distutils.versionpredicate) + if __name__ == '__main__': run_unittest(test_suite()) diff --git a/setuptools/_distutils/tests/unix_compat.py b/setuptools/_distutils/tests/unix_compat.py index b7718c2..8250b36 100644 --- a/setuptools/_distutils/tests/unix_compat.py +++ b/setuptools/_distutils/tests/unix_compat.py @@ -11,6 +11,5 @@ except ImportError: UNIX_ID_SUPPORT = grp and pwd UID_0_SUPPORT = UNIX_ID_SUPPORT and sys.platform != "cygwin" -require_unix_id = unittest.skipUnless( - UNIX_ID_SUPPORT, "Requires grp and pwd support") +require_unix_id = unittest.skipUnless(UNIX_ID_SUPPORT, "Requires grp and pwd support") require_uid_0 = unittest.skipUnless(UID_0_SUPPORT, "Requires UID 0 support") diff --git a/setuptools/_distutils/text_file.py b/setuptools/_distutils/text_file.py index 93abad3..015d680 100644 --- 
a/setuptools/_distutils/text_file.py +++ b/setuptools/_distutils/text_file.py @@ -9,79 +9,82 @@ import sys, io class TextFile: """Provides a file-like object that takes care of all the things you - commonly want to do when processing a text file that has some - line-by-line syntax: strip comments (as long as "#" is your - comment character), skip blank lines, join adjacent lines by - escaping the newline (ie. backslash at end of line), strip - leading and/or trailing whitespace. All of these are optional - and independently controllable. - - Provides a 'warn()' method so you can generate warning messages that - report physical line number, even if the logical line in question - spans multiple physical lines. Also provides 'unreadline()' for - implementing line-at-a-time lookahead. - - Constructor is called as: - - TextFile (filename=None, file=None, **options) - - It bombs (RuntimeError) if both 'filename' and 'file' are None; - 'filename' should be a string, and 'file' a file object (or - something that provides 'readline()' and 'close()' methods). It is - recommended that you supply at least 'filename', so that TextFile - can include it in warning messages. If 'file' is not supplied, - TextFile creates its own using 'io.open()'. - - The options are all boolean, and affect the value returned by - 'readline()': - strip_comments [default: true] - strip from "#" to end-of-line, as well as any whitespace - leading up to the "#" -- unless it is escaped by a backslash - lstrip_ws [default: false] - strip leading whitespace from each line before returning it - rstrip_ws [default: true] - strip trailing whitespace (including line terminator!) from - each line before returning it - skip_blanks [default: true} - skip lines that are empty *after* stripping comments and - whitespace. (If both lstrip_ws and rstrip_ws are false, - then some lines may consist of solely whitespace: these will - *not* be skipped, even if 'skip_blanks' is true.) - join_lines [default: false] - if a backslash is the last non-newline character on a line - after stripping comments and whitespace, join the following line - to it to form one "logical line"; if N consecutive lines end - with a backslash, then N+1 physical lines will be joined to - form one logical line. - collapse_join [default: false] - strip leading whitespace from lines that are joined to their - predecessor; only matters if (join_lines and not lstrip_ws) - errors [default: 'strict'] - error handler used to decode the file content - - Note that since 'rstrip_ws' can strip the trailing newline, the - semantics of 'readline()' must differ from those of the builtin file - object's 'readline()' method! In particular, 'readline()' returns - None for end-of-file: an empty string might just be a blank line (or - an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is - not.""" - - default_options = { 'strip_comments': 1, - 'skip_blanks': 1, - 'lstrip_ws': 0, - 'rstrip_ws': 1, - 'join_lines': 0, - 'collapse_join': 0, - 'errors': 'strict', - } + commonly want to do when processing a text file that has some + line-by-line syntax: strip comments (as long as "#" is your + comment character), skip blank lines, join adjacent lines by + escaping the newline (ie. backslash at end of line), strip + leading and/or trailing whitespace. All of these are optional + and independently controllable. + + Provides a 'warn()' method so you can generate warning messages that + report physical line number, even if the logical line in question + spans multiple physical lines. 
Also provides 'unreadline()' for + implementing line-at-a-time lookahead. + + Constructor is called as: + + TextFile (filename=None, file=None, **options) + + It bombs (RuntimeError) if both 'filename' and 'file' are None; + 'filename' should be a string, and 'file' a file object (or + something that provides 'readline()' and 'close()' methods). It is + recommended that you supply at least 'filename', so that TextFile + can include it in warning messages. If 'file' is not supplied, + TextFile creates its own using 'io.open()'. + + The options are all boolean, and affect the value returned by + 'readline()': + strip_comments [default: true] + strip from "#" to end-of-line, as well as any whitespace + leading up to the "#" -- unless it is escaped by a backslash + lstrip_ws [default: false] + strip leading whitespace from each line before returning it + rstrip_ws [default: true] + strip trailing whitespace (including line terminator!) from + each line before returning it + skip_blanks [default: true} + skip lines that are empty *after* stripping comments and + whitespace. (If both lstrip_ws and rstrip_ws are false, + then some lines may consist of solely whitespace: these will + *not* be skipped, even if 'skip_blanks' is true.) + join_lines [default: false] + if a backslash is the last non-newline character on a line + after stripping comments and whitespace, join the following line + to it to form one "logical line"; if N consecutive lines end + with a backslash, then N+1 physical lines will be joined to + form one logical line. + collapse_join [default: false] + strip leading whitespace from lines that are joined to their + predecessor; only matters if (join_lines and not lstrip_ws) + errors [default: 'strict'] + error handler used to decode the file content + + Note that since 'rstrip_ws' can strip the trailing newline, the + semantics of 'readline()' must differ from those of the builtin file + object's 'readline()' method! In particular, 'readline()' returns + None for end-of-file: an empty string might just be a blank line (or + an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is + not.""" + + default_options = { + 'strip_comments': 1, + 'skip_blanks': 1, + 'lstrip_ws': 0, + 'rstrip_ws': 1, + 'join_lines': 0, + 'collapse_join': 0, + 'errors': 'strict', + } def __init__(self, filename=None, file=None, **options): """Construct a new TextFile object. At least one of 'filename' - (a string) and 'file' (a file-like object) must be supplied. - They keyword argument options are described above and affect - the values returned by 'readline()'.""" + (a string) and 'file' (a file-like object) must be supplied. + They keyword argument options are described above and affect + the values returned by 'readline()'.""" if filename is None and file is None: - raise RuntimeError("you must supply either or both of 'filename' and 'file'") + raise RuntimeError( + "you must supply either or both of 'filename' and 'file'" + ) # set values for all options -- either from client option hash # or fallback to default_options @@ -101,7 +104,7 @@ class TextFile: else: self.filename = filename self.file = file - self.current_line = 0 # assuming that file is at BOF! + self.current_line = 0 # assuming that file is at BOF! # 'linebuf' is a stack of lines that will be emptied before we # actually read from the file; it's only populated by an @@ -110,14 +113,14 @@ class TextFile: def open(self, filename): """Open a new file named 'filename'. 
This overrides both the - 'filename' and 'file' arguments to the constructor.""" + 'filename' and 'file' arguments to the constructor.""" self.filename = filename self.file = io.open(self.filename, 'r', errors=self.errors) self.current_line = 0 def close(self): """Close the current file and forget everything we know about it - (filename, current line number).""" + (filename, current line number).""" file = self.file self.file = None self.filename = None @@ -141,24 +144,24 @@ class TextFile: def warn(self, msg, line=None): """Print (to stderr) a warning message tied to the current logical - line in the current file. If the current logical line in the - file spans multiple physical lines, the warning refers to the - whole range, eg. "lines 3-5". If 'line' supplied, it overrides - the current line number; it may be a list or tuple to indicate a - range of physical lines, or an integer for a single physical - line.""" + line in the current file. If the current logical line in the + file spans multiple physical lines, the warning refers to the + whole range, eg. "lines 3-5". If 'line' supplied, it overrides + the current line number; it may be a list or tuple to indicate a + range of physical lines, or an integer for a single physical + line.""" sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n") def readline(self): """Read and return a single logical line from the current file (or - from an internal buffer if lines have previously been "unread" - with 'unreadline()'). If the 'join_lines' option is true, this - may involve reading multiple physical lines concatenated into a - single string. Updates the current line number, so calling - 'warn()' after 'readline()' emits a warning about the physical - line(s) just read. Returns None on end-of-file, since the empty - string can occur if 'rstrip_ws' is true but 'strip_blanks' is - not.""" + from an internal buffer if lines have previously been "unread" + with 'unreadline()'). If the 'join_lines' option is true, this + may involve reading multiple physical lines concatenated into a + single string. Updates the current line number, so calling + 'warn()' after 'readline()' emits a warning about the physical + line(s) just read. Returns None on end-of-file, since the empty + string can occur if 'rstrip_ws' is true but 'strip_blanks' is + not.""" # If any "unread" lines waiting in 'linebuf', return the top # one. (We don't actually buffer read-ahead data -- lines only # get put in 'linebuf' if the client explicitly does an @@ -187,12 +190,12 @@ class TextFile: # lurking in there) and otherwise leave the line alone. pos = line.find("#") - if pos == -1: # no "#" -- no comments + if pos == -1: # no "#" -- no comments pass # It's definitely a comment -- either "#" is the first # character, or it's elsewhere and unescaped. - elif pos == 0 or line[pos-1] != "\\": + elif pos == 0 or line[pos - 1] != "\\": # Have to preserve the trailing newline, because it's # the job of a later step (rstrip_ws) to remove it -- # and if rstrip_ws is false, we'd better preserve it! @@ -211,15 +214,14 @@ class TextFile: # result in "hello there". if line.strip() == "": continue - else: # it's an escaped "#" + else: # it's an escaped "#" line = line.replace("\\#", "#") # did previous line end with a backslash? 
then accumulate if self.join_lines and buildup_line: # oops: end of file if line is None: - self.warn("continuation line immediately precedes " - "end-of-file") + self.warn("continuation line immediately precedes " "end-of-file") return buildup_line if self.collapse_join: @@ -230,11 +232,10 @@ class TextFile: if isinstance(self.current_line, list): self.current_line[1] = self.current_line[1] + 1 else: - self.current_line = [self.current_line, - self.current_line + 1] + self.current_line = [self.current_line, self.current_line + 1] # just an ordinary line, read it as usual else: - if line is None: # eof + if line is None: # eof return None # still have to be careful about incrementing the line number! @@ -271,7 +272,7 @@ class TextFile: def readlines(self): """Read and return the list of all logical lines remaining in the - current file.""" + current file.""" lines = [] while True: line = self.readline() @@ -281,6 +282,6 @@ class TextFile: def unreadline(self, line): """Push 'line' (a string) onto an internal buffer that will be - checked by future 'readline()' calls. Handy for implementing - a parser with line-at-a-time lookahead.""" + checked by future 'readline()' calls. Handy for implementing + a parser with line-at-a-time lookahead.""" self.linebuf.append(line) diff --git a/setuptools/_distutils/unixccompiler.py b/setuptools/_distutils/unixccompiler.py index 715408f..4be74fd 100644 --- a/setuptools/_distutils/unixccompiler.py +++ b/setuptools/_distutils/unixccompiler.py @@ -17,10 +17,8 @@ import os, sys, re, shlex from distutils import sysconfig from distutils.dep_util import newer -from distutils.ccompiler import \ - CCompiler, gen_preprocess_options, gen_lib_options -from distutils.errors import \ - DistutilsExecError, CompileError, LibError, LinkError +from distutils.ccompiler import CCompiler, gen_preprocess_options, gen_lib_options +from distutils.errors import DistutilsExecError, CompileError, LibError, LinkError from distutils import log from ._macos_compat import compiler_fixup @@ -110,15 +108,16 @@ class UnixCCompiler(CCompiler): # are pretty generic; they will probably have to be set by an outsider # (eg. using information discovered by the sysconfig about building # Python extensions). - executables = {'preprocessor' : None, - 'compiler' : ["cc"], - 'compiler_so' : ["cc"], - 'compiler_cxx' : ["cc"], - 'linker_so' : ["cc", "-shared"], - 'linker_exe' : ["cc"], - 'archiver' : ["ar", "-cr"], - 'ranlib' : None, - } + executables = { + 'preprocessor': None, + 'compiler': ["cc"], + 'compiler_so': ["cc"], + 'compiler_cxx': ["cc"], + 'linker_so': ["cc", "-shared"], + 'linker_exe': ["cc"], + 'archiver': ["ar", "-cr"], + 'ranlib': None, + } if sys.platform[:6] == "darwin": executables['ranlib'] = ["ranlib"] @@ -129,7 +128,7 @@ class UnixCCompiler(CCompiler): # reasonable common default here, but it's not necessarily used on all # Unices! 
- src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"] + src_extensions = [".c", ".C", ".cc", ".cxx", ".cpp", ".m"] obj_extension = ".o" static_lib_extension = ".a" shared_lib_extension = ".so" @@ -140,8 +139,15 @@ class UnixCCompiler(CCompiler): if sys.platform == "cygwin": exe_extension = ".exe" - def preprocess(self, source, output_file=None, macros=None, - include_dirs=None, extra_preargs=None, extra_postargs=None): + def preprocess( + self, + source, + output_file=None, + macros=None, + include_dirs=None, + extra_preargs=None, + extra_postargs=None, + ): fixed_args = self._fix_compile_args(None, macros, include_dirs) ignore, macros, include_dirs = fixed_args pp_opts = gen_preprocess_options(macros, include_dirs) @@ -167,26 +173,22 @@ class UnixCCompiler(CCompiler): raise CompileError(msg) def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - compiler_so = compiler_fixup( - self.compiler_so, cc_args + extra_postargs) + compiler_so = compiler_fixup(self.compiler_so, cc_args + extra_postargs) try: - self.spawn(compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) + self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs) except DistutilsExecError as msg: raise CompileError(msg) - def create_static_lib(self, objects, output_libname, - output_dir=None, debug=0, target_lang=None): + def create_static_lib( + self, objects, output_libname, output_dir=None, debug=0, target_lang=None + ): objects, output_dir = self._fix_object_args(objects, output_dir) - output_filename = \ - self.library_filename(output_libname, output_dir=output_dir) + output_filename = self.library_filename(output_libname, output_dir=output_dir) if self._need_link(objects, output_filename): self.mkpath(os.path.dirname(output_filename)) - self.spawn(self.archiver + - [output_filename] + - objects + self.objects) + self.spawn(self.archiver + [output_filename] + objects + self.objects) # Not many Unices required ranlib anymore -- SunOS 4.x is, I # think the only major Unix that does. 
Maybe we need some @@ -201,26 +203,34 @@ class UnixCCompiler(CCompiler): else: log.debug("skipping %s (up-to-date)", output_filename) - def link(self, target_desc, objects, - output_filename, output_dir=None, libraries=None, - library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): + def link( + self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None, + ): objects, output_dir = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) + fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) libraries, library_dirs, runtime_library_dirs = fixed_args - lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, - libraries) + lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) if not isinstance(output_dir, (str, type(None))): raise TypeError("'output_dir' must be a string or None") if output_dir is not None: output_filename = os.path.join(output_dir, output_filename) if self._need_link(objects, output_filename): - ld_args = (objects + self.objects + - lib_opts + ['-o', output_filename]) + ld_args = objects + self.objects + lib_opts + ['-o', output_filename] if debug: ld_args[:0] = ['-g'] if extra_preargs: @@ -259,8 +269,10 @@ class UnixCCompiler(CCompiler): def library_dir_option(self, dir): return "-L" + dir - def _is_gcc(self, compiler_name): - return "gcc" in compiler_name or "g++" in compiler_name + def _is_gcc(self): + cc_var = sysconfig.get_config_var("CC") + compiler = os.path.basename(shlex.split(cc_var)[0]) + return "gcc" in compiler or "g++" in compiler def runtime_library_dir_option(self, dir): # XXX Hackish, at the very least. See Python bug #445902: @@ -276,20 +288,21 @@ class UnixCCompiler(CCompiler): # this time, there's no way to determine this information from # the configuration data stored in the Python installation, so # we use this hack. 
- compiler = os.path.basename(shlex.split(sysconfig.get_config_var("CC"))[0]) if sys.platform[:6] == "darwin": from distutils.util import get_macosx_target_ver, split_version + macosx_target_ver = get_macosx_target_ver() if macosx_target_ver and split_version(macosx_target_ver) >= [10, 5]: return "-Wl,-rpath," + dir - else: # no support for -rpath on earlier macOS versions + else: # no support for -rpath on earlier macOS versions return "-L" + dir elif sys.platform[:7] == "freebsd": return "-Wl,-rpath=" + dir elif sys.platform[:5] == "hp-ux": - if self._is_gcc(compiler): - return ["-Wl,+s", "-L" + dir] - return ["+s", "-L" + dir] + return [ + "-Wl,+s" if self._is_gcc() else "+s", + "-L" + dir, + ] # For all compilers, `-Wl` is the presumed way to # pass a compiler option to the linker and `-R` is @@ -336,8 +349,6 @@ class UnixCCompiler(CCompiler): else: sysroot = m.group(1) - - for dir in dirs: shared = os.path.join(dir, shared_f) dylib = os.path.join(dir, dylib_f) @@ -345,8 +356,9 @@ class UnixCCompiler(CCompiler): xcode_stub = os.path.join(dir, xcode_stub_f) if sys.platform == 'darwin' and ( - dir.startswith('/System/') or ( - dir.startswith('/usr/') and not dir.startswith('/usr/local/'))): + dir.startswith('/System/') + or (dir.startswith('/usr/') and not dir.startswith('/usr/local/')) + ): shared = os.path.join(sysroot, dir[1:], shared_f) dylib = os.path.join(sysroot, dir[1:], dylib_f) diff --git a/setuptools/_distutils/util.py b/setuptools/_distutils/util.py index 6d506d7..d59c362 100644 --- a/setuptools/_distutils/util.py +++ b/setuptools/_distutils/util.py @@ -4,10 +4,11 @@ Miscellaneous utility functions -- anything that doesn't fit into one of the other *util.py modules. """ +import importlib.util import os import re -import importlib.util import string +import subprocess import sys import sysconfig from distutils.errors import DistutilsPlatformError @@ -15,19 +16,18 @@ from distutils.dep_util import newer from distutils.spawn import spawn from distutils import log from distutils.errors import DistutilsByteCompileError -from .py35compat import _optim_args_from_interpreter_flags def get_host_platform(): - """Return a string that identifies the current platform. This is used mainly to - distinguish platform-specific build directories and platform-specific built - distributions. + """ + Return a string that identifies the current platform. Use this + function to distinguish platform-specific build directories and + platform-specific built distributions. """ - # We initially exposed platforms as defined in Python 3.9 + # This function initially exposed platforms as defined in Python 3.9 # even with older Python versions when distutils was split out. - # Now that we delegate to stdlib sysconfig we need to restore this - # in case anyone has started to depend on it. + # Now it delegates to stdlib sysconfig, but maintains compatibility. 
if sys.version_info < (3, 8): if os.name == 'nt': @@ -41,43 +41,49 @@ def get_host_platform(): osname, host, release, version, machine = os.uname() if osname[:3] == "aix": from .py38compat import aix_platform + return aix_platform(osname, version, release) return sysconfig.get_platform() + def get_platform(): if os.name == 'nt': TARGET_TO_PLAT = { - 'x86' : 'win32', - 'x64' : 'win-amd64', - 'arm' : 'win-arm32', + 'x86': 'win32', + 'x64': 'win-amd64', + 'arm': 'win-arm32', 'arm64': 'win-arm64', } - return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform() - else: - return get_host_platform() + target = os.environ.get('VSCMD_ARG_TGT_ARCH') + return TARGET_TO_PLAT.get(target) or get_host_platform() + return get_host_platform() if sys.platform == 'darwin': - _syscfg_macosx_ver = None # cache the version pulled from sysconfig + _syscfg_macosx_ver = None # cache the version pulled from sysconfig MACOSX_VERSION_VAR = 'MACOSX_DEPLOYMENT_TARGET' + def _clear_cached_macosx_ver(): """For testing only. Do not call.""" global _syscfg_macosx_ver _syscfg_macosx_ver = None + def get_macosx_target_ver_from_syscfg(): """Get the version of macOS latched in the Python interpreter configuration. Returns the version as a string or None if can't obtain one. Cached.""" global _syscfg_macosx_ver if _syscfg_macosx_ver is None: from distutils import sysconfig + ver = sysconfig.get_config_var(MACOSX_VERSION_VAR) or '' if ver: _syscfg_macosx_ver = ver return _syscfg_macosx_ver + def get_macosx_target_ver(): """Return the version of macOS for which we are building. @@ -95,12 +101,16 @@ def get_macosx_target_ver(): # ensures extension modules are built with correct compatibility # values, specifically LDSHARED which can use # '-undefined dynamic_lookup' which only works on >= 10.3. - if syscfg_ver and split_version(syscfg_ver) >= [10, 3] and \ - split_version(env_ver) < [10, 3]: - my_msg = ('$' + MACOSX_VERSION_VAR + ' mismatch: ' - 'now "%s" but "%s" during configure; ' - 'must use 10.3 or later' - % (env_ver, syscfg_ver)) + if ( + syscfg_ver + and split_version(syscfg_ver) >= [10, 3] + and split_version(env_ver) < [10, 3] + ): + my_msg = ( + '$' + MACOSX_VERSION_VAR + ' mismatch: ' + 'now "%s" but "%s" during configure; ' + 'must use 10.3 or later' % (env_ver, syscfg_ver) + ) raise DistutilsPlatformError(my_msg) return env_ver return syscfg_ver @@ -111,7 +121,7 @@ def split_version(s): return [int(n) for n in s.split('.')] -def convert_path (pathname): +def convert_path(pathname): """Return 'pathname' as a name that will work on the native filesystem, i.e. split it on '/' and put it back together again using the current directory separator. Needed because filenames in the setup script are @@ -136,10 +146,11 @@ def convert_path (pathname): return os.curdir return os.path.join(*paths) + # convert_path () -def change_root (new_root, pathname): +def change_root(new_root, pathname): """Return 'pathname' with 'new_root' prepended. If 'pathname' is relative, this is equivalent to "os.path.join(new_root,pathname)". 
Otherwise, it requires making 'pathname' relative and then joining the @@ -157,12 +168,13 @@ def change_root (new_root, pathname): path = path[1:] return os.path.join(new_root, path) - else: - raise DistutilsPlatformError("nothing known about platform '%s'" % os.name) + raise DistutilsPlatformError(f"nothing known about platform '{os.name}'") _environ_checked = 0 -def check_environ (): + + +def check_environ(): """Ensure that 'os.environ' has all the environment variables we guarantee that users can use in config files, command-line options, etc. Currently this includes: @@ -177,6 +189,7 @@ def check_environ (): if os.name == 'posix' and 'HOME' not in os.environ: try: import pwd + os.environ['HOME'] = pwd.getpwuid(os.getuid())[5] except (ImportError, KeyError): # bpo-10496: if the current user identifier doesn't exist in the @@ -189,7 +202,7 @@ def check_environ (): _environ_checked = 1 -def subst_vars (s, local_vars): +def subst_vars(s, local_vars): """ Perform variable substitution on 'string'. Variables are indicated by format-style braces ("{var}"). @@ -207,19 +220,20 @@ def subst_vars (s, local_vars): except KeyError as var: raise ValueError(f"invalid variable {var}") -# subst_vars () - def _subst_compat(s): """ Replace shell/Perl-style variable substitution with format-style. For compatibility. """ + def _subst(match): return f'{{{match.group(1)}}}' + repl = re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s) if repl != s: import warnings + warnings.warn( "shell/Perl-style substitions are deprecated", DeprecationWarning, @@ -227,7 +241,7 @@ def _subst_compat(s): return repl -def grok_environment_error (exc, prefix="error: "): +def grok_environment_error(exc, prefix="error: "): # Function kept for backward compatibility. # Used to try clever things with EnvironmentErrors, # but nowadays str(exception) produces good messages. @@ -236,13 +250,16 @@ def grok_environment_error (exc, prefix="error: "): # Needed by 'split_quoted()' _wordchars_re = _squote_re = _dquote_re = None + + def _init_regex(): global _wordchars_re, _squote_re, _dquote_re _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace) _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'") _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"') -def split_quoted (s): + +def split_quoted(s): """Split a string up according to Unix shell-like rules for quotes and backslashes. In short: words are delimited by spaces, as long as those spaces are not escaped by a backslash, or inside a quoted string. @@ -256,7 +273,8 @@ def split_quoted (s): # This is a nice algorithm for splitting up a single string, since it # doesn't require character-by-character examination. It was a little # bit of a brain-bender to get it working right, though... 
- if _wordchars_re is None: _init_regex() + if _wordchars_re is None: + _init_regex() s = s.strip() words = [] @@ -269,20 +287,23 @@ def split_quoted (s): words.append(s[:end]) break - if s[end] in string.whitespace: # unescaped, unquoted whitespace: now - words.append(s[:end]) # we definitely have a word delimiter + if s[end] in string.whitespace: + # unescaped, unquoted whitespace: now + # we definitely have a word delimiter + words.append(s[:end]) s = s[end:].lstrip() pos = 0 - elif s[end] == '\\': # preserve whatever is being escaped; - # will become part of the current word - s = s[:end] + s[end+1:] - pos = end+1 + elif s[end] == '\\': + # preserve whatever is being escaped; + # will become part of the current word + s = s[:end] + s[end + 1 :] + pos = end + 1 else: - if s[end] == "'": # slurp singly-quoted string + if s[end] == "'": # slurp singly-quoted string m = _squote_re.match(s, end) - elif s[end] == '"': # slurp doubly-quoted string + elif s[end] == '"': # slurp doubly-quoted string m = _dquote_re.match(s, end) else: raise RuntimeError("this can't happen (bad char '%c')" % s[end]) @@ -291,7 +312,7 @@ def split_quoted (s): raise ValueError("bad string (mismatched %s quotes?)" % s[end]) (beg, end) = m.span() - s = s[:beg] + s[beg+1:end-1] + s[end:] + s = s[:beg] + s[beg + 1 : end - 1] + s[end:] pos = m.end() - 2 if pos >= len(s): @@ -300,10 +321,11 @@ def split_quoted (s): return words + # split_quoted () -def execute (func, args, msg=None, verbose=0, dry_run=0): +def execute(func, args, msg=None, verbose=0, dry_run=0): """Perform some action that affects the outside world (eg. by writing to the filesystem). Such actions are special because they are disabled by the 'dry_run' flag. This method takes care of all @@ -314,7 +336,7 @@ def execute (func, args, msg=None, verbose=0, dry_run=0): """ if msg is None: msg = "%s%r" % (func.__name__, args) - if msg[-2:] == ',)': # correct for singleton tuple + if msg[-2:] == ',)': # correct for singleton tuple msg = msg[0:-2] + ')' log.info(msg) @@ -322,7 +344,7 @@ def execute (func, args, msg=None, verbose=0, dry_run=0): func(*args) -def strtobool (val): +def strtobool(val): """Convert a string representation of truth to true (1) or false (0). True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values @@ -338,11 +360,16 @@ def strtobool (val): raise ValueError("invalid truth value %r" % (val,)) -def byte_compile (py_files, - optimize=0, force=0, - prefix=None, base_dir=None, - verbose=1, dry_run=0, - direct=None): +def byte_compile( + py_files, + optimize=0, + force=0, + prefix=None, + base_dir=None, + verbose=1, + dry_run=0, + direct=None, +): """Byte-compile a collection of Python source files to .pyc files in a __pycache__ subdirectory. 'py_files' is a list of files to compile; any files that don't end in ".py" are silently @@ -372,10 +399,6 @@ def byte_compile (py_files, it set to None. """ - # Late import to fix a bootstrap issue: _posixsubprocess is built by - # setup.py, but setup.py uses distutils. - import subprocess - # nothing is done if sys.dont_write_bytecode is True if sys.dont_write_bytecode: raise DistutilsByteCompileError('byte-compiling is disabled.') @@ -391,16 +414,18 @@ def byte_compile (py_files, # optimize mode, or if either optimization level was requested by # the caller. if direct is None: - direct = (__debug__ and optimize == 0) + direct = __debug__ and optimize == 0 # "Indirect" byte-compilation: write a temporary script and then # run it with the appropriate flags. 
if not direct: try: from tempfile import mkstemp + (script_fd, script_name) = mkstemp(".py") except ImportError: from tempfile import mktemp + (script_fd, script_name) = None, mktemp(".py") log.info("writing byte-compilation script '%s'", script_name) if not dry_run: @@ -410,10 +435,12 @@ def byte_compile (py_files, script = open(script_name, "w") with script: - script.write("""\ + script.write( + """\ from distutils.util import byte_compile files = [ -""") +""" + ) # XXX would be nice to write absolute filenames, just for # safety's sake (script should be more robust in the face of @@ -425,24 +452,22 @@ files = [ # problem is that it's really a directory, but I'm treating it # as a dumb string, so trailing slashes and so forth matter. - #py_files = map(os.path.abspath, py_files) - #if prefix: - # prefix = os.path.abspath(prefix) - script.write(",\n".join(map(repr, py_files)) + "]\n") - script.write(""" + script.write( + """ byte_compile(files, optimize=%r, force=%r, prefix=%r, base_dir=%r, verbose=%r, dry_run=0, direct=1) -""" % (optimize, force, prefix, base_dir, verbose)) +""" + % (optimize, force, prefix, base_dir, verbose) + ) cmd = [sys.executable] - cmd.extend(_optim_args_from_interpreter_flags()) + cmd.extend(subprocess._optim_args_from_interpreter_flags()) cmd.append(script_name) spawn(cmd, dry_run=dry_run) - execute(os.remove, (script_name,), "removing %s" % script_name, - dry_run=dry_run) + execute(os.remove, (script_name,), "removing %s" % script_name, dry_run=dry_run) # "Direct" byte-compilation: use the py_compile module to compile # right here, right now. Note that the script generated in indirect @@ -462,16 +487,17 @@ byte_compile(files, optimize=%r, force=%r, # dfile - purported source filename (same as 'file' by default) if optimize >= 0: opt = '' if optimize == 0 else optimize - cfile = importlib.util.cache_from_source( - file, optimization=opt) + cfile = importlib.util.cache_from_source(file, optimization=opt) else: cfile = importlib.util.cache_from_source(file) dfile = file if prefix: - if file[:len(prefix)] != prefix: - raise ValueError("invalid prefix: filename %r doesn't start with %r" - % (file, prefix)) - dfile = dfile[len(prefix):] + if file[: len(prefix)] != prefix: + raise ValueError( + "invalid prefix: filename %r doesn't start with %r" + % (file, prefix) + ) + dfile = dfile[len(prefix) :] if base_dir: dfile = os.path.join(base_dir, dfile) @@ -482,12 +508,10 @@ byte_compile(files, optimize=%r, force=%r, if not dry_run: compile(file, cfile, dfile) else: - log.debug("skipping byte-compilation of %s to %s", - file, cfile_base) + log.debug("skipping byte-compilation of %s to %s", file, cfile_base) -# byte_compile () -def rfc822_escape (header): +def rfc822_escape(header): """Return a version of the string escaped for inclusion in an RFC-822 header, by ensuring there are 8 spaces space after each newline. """ diff --git a/setuptools/_distutils/version.py b/setuptools/_distutils/version.py index 31f504e..a406a30 100644 --- a/setuptools/_distutils/version.py +++ b/setuptools/_distutils/version.py @@ -49,7 +49,7 @@ class Version: rich comparisons to _cmp. 
""" - def __init__ (self, vstring=None): + def __init__(self, vstring=None): if vstring: self.parse(vstring) warnings.warn( @@ -59,7 +59,7 @@ class Version: stacklevel=2, ) - def __repr__ (self): + def __repr__(self): return "%s ('%s')" % (self.__class__.__name__, str(self)) def __eq__(self, other): @@ -110,7 +110,7 @@ class Version: # instance of your version class) -class StrictVersion (Version): +class StrictVersion(Version): """Version numbering for anal retentives and software idealists. Implements the standard interface for version number classes as @@ -147,17 +147,16 @@ class StrictVersion (Version): in the distutils documentation. """ - version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$', - re.VERBOSE | re.ASCII) + version_re = re.compile( + r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$', re.VERBOSE | re.ASCII + ) - - def parse (self, vstring): + def parse(self, vstring): match = self.version_re.match(vstring) if not match: raise ValueError("invalid version number '%s'" % vstring) - (major, minor, patch, prerelease, prerelease_num) = \ - match.group(1, 2, 4, 5, 6) + (major, minor, patch, prerelease, prerelease_num) = match.group(1, 2, 4, 5, 6) if patch: self.version = tuple(map(int, [major, minor, patch])) @@ -169,8 +168,7 @@ class StrictVersion (Version): else: self.prerelease = None - - def __str__ (self): + def __str__(self): if self.version[2] == 0: vstring = '.'.join(map(str, self.version[0:2])) @@ -182,8 +180,7 @@ class StrictVersion (Version): return vstring - - def _cmp (self, other): + def _cmp(self, other): if isinstance(other, str): with suppress_known_deprecation(): other = StrictVersion(other) @@ -204,13 +201,13 @@ class StrictVersion (Version): # case 3: self doesn't have prerelease, other does: self is greater # case 4: both have prerelease: must compare them! - if (not self.prerelease and not other.prerelease): + if not self.prerelease and not other.prerelease: return 0 - elif (self.prerelease and not other.prerelease): + elif self.prerelease and not other.prerelease: return -1 - elif (not self.prerelease and other.prerelease): + elif not self.prerelease and other.prerelease: return 1 - elif (self.prerelease and other.prerelease): + elif self.prerelease and other.prerelease: if self.prerelease == other.prerelease: return 0 elif self.prerelease < other.prerelease: @@ -220,6 +217,7 @@ class StrictVersion (Version): else: assert False, "never get here" + # end class StrictVersion @@ -287,7 +285,8 @@ class StrictVersion (Version): # the Right Thing" (ie. the code matches the conception). But I'd rather # have a conception that matches common notions about version numbers. -class LooseVersion (Version): + +class LooseVersion(Version): """Version numbering for anarchists and software realists. 
Implements the standard interface for version number classes as @@ -322,13 +321,12 @@ class LooseVersion (Version): component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE) - def parse (self, vstring): + def parse(self, vstring): # I've given up on thinking I can reconstruct the version string # from the parsed tuple -- so I just store the string here for # use by __str__ self.vstring = vstring - components = [x for x in self.component_re.split(vstring) - if x and x != '.'] + components = [x for x in self.component_re.split(vstring) if x and x != '.'] for i, obj in enumerate(components): try: components[i] = int(obj) @@ -337,16 +335,13 @@ class LooseVersion (Version): self.version = components - - def __str__ (self): + def __str__(self): return self.vstring - - def __repr__ (self): + def __repr__(self): return "LooseVersion ('%s')" % str(self) - - def _cmp (self, other): + def _cmp(self, other): if isinstance(other, str): other = LooseVersion(other) elif not isinstance(other, LooseVersion): diff --git a/setuptools/_distutils/versionpredicate.py b/setuptools/_distutils/versionpredicate.py index 55f25d9..6ea1192 100644 --- a/setuptools/_distutils/versionpredicate.py +++ b/setuptools/_distutils/versionpredicate.py @@ -5,11 +5,10 @@ import distutils.version import operator -re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", - re.ASCII) +re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", re.ASCII) # (package) (rest) -re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses +re_paren = re.compile(r"^\s*\((.*)\)\s*$") # (list) inside of parentheses re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$") # (comp) (version) @@ -27,8 +26,16 @@ def splitUp(pred): other = distutils.version.StrictVersion(verStr) return (comp, other) -compmap = {"<": operator.lt, "<=": operator.le, "==": operator.eq, - ">": operator.gt, ">=": operator.ge, "!=": operator.ne} + +compmap = { + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + ">": operator.gt, + ">=": operator.ge, + "!=": operator.ne, +} + class VersionPredicate: """Parse and test package version predicates. @@ -96,8 +103,7 @@ class VersionPredicate: """ def __init__(self, versionPredicateStr): - """Parse a version predicate string. - """ + """Parse a version predicate string.""" # Fields: # name: package name # pred: list of (comparison string, StrictVersion) @@ -117,8 +123,7 @@ class VersionPredicate: str = match.groups()[0] self.pred = [splitUp(aPred) for aPred in str.split(",")] if not self.pred: - raise ValueError("empty parenthesized list in %r" - % versionPredicateStr) + raise ValueError("empty parenthesized list in %r" % versionPredicateStr) else: self.pred = [] @@ -142,6 +147,7 @@ class VersionPredicate: _provision_rx = None + def split_provision(value): """Return the name and optional version number of a provision. 
@@ -156,8 +162,8 @@ def split_provision(value): global _provision_rx if _provision_rx is None: _provision_rx = re.compile( - r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", - re.ASCII) + r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", re.ASCII + ) value = value.strip() m = _provision_rx.match(value) if not m: diff --git a/setuptools/command/build.py b/setuptools/command/build.py new file mode 100644 index 0000000..12a4362 --- /dev/null +++ b/setuptools/command/build.py @@ -0,0 +1,24 @@ +from distutils.command.build import build as _build +import warnings + +from setuptools import SetuptoolsDeprecationWarning + + +_ORIGINAL_SUBCOMMANDS = {"build_py", "build_clib", "build_ext", "build_scripts"} + + +class build(_build): + # copy to avoid sharing the object with parent class + sub_commands = _build.sub_commands[:] + + def run(self): + subcommands = {cmd[0] for cmd in _build.sub_commands} + if subcommands - _ORIGINAL_SUBCOMMANDS: + msg = """ + It seems that you are using `distutils.command.build` to add + new subcommands. Using `distutils` directly is considered deprecated, + please use `setuptools.command.build`. + """ + warnings.warn(msg, SetuptoolsDeprecationWarning) + self.sub_commands = _build.sub_commands + super().run() diff --git a/setuptools/tests/test_build.py b/setuptools/tests/test_build.py new file mode 100644 index 0000000..cefb3d3 --- /dev/null +++ b/setuptools/tests/test_build.py @@ -0,0 +1,63 @@ +from contextlib import contextmanager +from setuptools import Command, SetuptoolsDeprecationWarning +from setuptools.dist import Distribution +from setuptools.command.build import build +from distutils.command.build import build as distutils_build + +import pytest + + +def test_distribution_gives_setuptools_build_obj(tmpdir_cwd): + """ + Check that the setuptools Distribution uses the + setuptools specific build object. + """ + + dist = Distribution(dict( + script_name='setup.py', + script_args=['build'], + packages=[], + package_data={'': ['path/*']}, + )) + assert isinstance(dist.get_command_obj("build"), build) + + +@contextmanager +def _restore_sub_commands(): + orig = distutils_build.sub_commands[:] + try: + yield + finally: + distutils_build.sub_commands = orig + + +class Subcommand(Command): + """Dummy command to be used in tests""" + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + raise NotImplementedError("just to check if the command runs") + + +@_restore_sub_commands() +def test_subcommand_in_distutils(tmpdir_cwd): + """ + Ensure that sub commands registered in ``distutils`` run, + after instructing the users to migrate to ``setuptools``. + """ + dist = Distribution(dict( + packages=[], + cmdclass={'subcommand': Subcommand}, + )) + distutils_build.sub_commands.append(('subcommand', None)) + + warning_msg = "please use .setuptools.command.build." + with pytest.warns(SetuptoolsDeprecationWarning, match=warning_msg): + # For backward compatibility, the subcommand should run anyway: + with pytest.raises(NotImplementedError, match="the command runs"): + dist.run_command("build")