Imported Upstream version 51.3.0 upstream/51.3.0
authorJinWang An <jinwang.an@samsung.com>
Mon, 27 Mar 2023 08:02:30 +0000 (17:02 +0900)
committerJinWang An <jinwang.an@samsung.com>
Mon, 27 Mar 2023 08:02:30 +0000 (17:02 +0900)
28 files changed:
.bumpversion.cfg
.flake8
.github/workflows/main.yml
.pre-commit-config.yaml
CHANGES.rst
README.rst
docs/build_meta.rst
docs/userguide/development_mode.rst
pkg_resources/__init__.py
pkg_resources/extern/__init__.py
pkg_resources/tests/data/my-test-package-zip/my-test-package.zip [new file with mode: 0644]
pkg_resources/tests/test_find_distributions.py
pyproject.toml
setup.cfg
setuptools/archive_util.py
setuptools/command/bdist_egg.py
setuptools/command/easy_install.py
setuptools/command/egg_info.py
setuptools/dist.py
setuptools/extern/__init__.py
setuptools/glob.py
setuptools/installer.py
setuptools/msvc.py
setuptools/package_index.py
setuptools/ssl_support.py
setuptools/tests/test_egg_info.py
skeleton.md
tox.ini

index facadd5f3b8cbfcc0633ea92d2b779e92677f0b1..0789cb984702351bf1db72ee3d516424ff965d5b 100644 (file)
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 51.2.0
+current_version = 51.3.0
 commit = True
 tag = True
 
diff --git a/.flake8 b/.flake8
index 8bc2d27060d3e279389ea801ebc27220f7566b0b..dd3cc20661bfc2638f2e3260f8e241ce9fa04336 100644 (file)
--- a/.flake8
+++ b/.flake8
@@ -1,14 +1,15 @@
 [flake8]
 max-line-length = 88
-exclude =
+
+# jaraco/skeleton#34
+max-complexity = 10
+
+extend-exclude =
+       build
        setuptools/_vendor
+       setuptools/_distutils
        pkg_resources/_vendor
-ignore =
-       # W503 violates spec https://github.com/PyCQA/pycodestyle/issues/513
-       W503
-       # W504 has issues https://github.com/OCA/maintainer-quality-tools/issues/545
-       W504
+
+extend-ignore =
        # Black creates whitespace before colon
        E203
-       setuptools/site-patch.py F821
-       setuptools/py*compat.py F811
index 31b941444d2f2aca43602df6ddeb19afacb743d9..f544814cf84c68e8c0c7c50e0b2c751a5ae1bc25 100644 (file)
@@ -1,4 +1,4 @@
-name: Automated Tests
+name: tests
 
 on: [push, pull_request]
 
index 6639c78c6cac945a80dde84cf53979f0d9626982..c15ab0c9e6a0cd7b1711f70704cbb5c1757b953f 100644 (file)
@@ -1,10 +1,10 @@
 repos:
 - repo: https://github.com/psf/black
-  rev: stable
+  rev: 20.8b1
   hooks:
   - id: black
 
 - repo: https://github.com/asottile/blacken-docs
-  rev: v1.8.0
+  rev: v1.9.1
   hooks:
   - id: blacken-docs
index e51b5e646c8d587af925bf3da2e0dc02d10f8468..48669d992cad2aea0bfbdb82cd5388b9f63a780d 100644 (file)
@@ -1,3 +1,17 @@
+v51.3.0
+-------
+
+
+Changes
+^^^^^^^
+* #1390: Newlines in metadata description/Summary now trigger a ValueError.
+* #2481: Define ``create_module()`` and ``exec_module()`` methods in ``VendorImporter``
+  to get rid of ``ImportWarning`` -- by :user:`hroncok`
+* #2489: ``pkg_resources`` behavior for zipimport now matches the regular behavior, and finds
+  ``.egg-info`` (previously would only find ``.dist-info``) -- by :user:`thatch`
+* #2529: Fixed an issue where version tags may be added multiple times
+
+
 v51.2.0
 -------
 
index 526d1222da9a3487c8eab6aee2548412cdbe9986..9bd03cf949ff7ccafa2f31b446d9f996a5d09a44 100644 (file)
@@ -6,9 +6,9 @@
 
 .. _PyPI link: https://pypi.org/project/setuptools
 
-.. image:: https://github.com/pypa/setuptools/workflows/Automated%20Tests/badge.svg
-   :target: https://github.com/pypa/setuptools/actions?query=workflow%3A%22Automated+Tests%22
-   :alt: Automated Tests
+.. image:: https://github.com/pypa/setuptools/workflows/tests/badge.svg
+   :target: https://github.com/pypa/setuptools/actions?query=workflow%3A%22tests%22
+   :alt: tests
 
 .. image:: https://img.shields.io/badge/code%20style-black-000000.svg
    :target: https://github.com/psf/black
index 9744488e9930f6bcf921fc130f14f7248fa2e2bc..2ad5ae267e64c645bd553230137bd8b814fb94e6 100644 (file)
@@ -7,7 +7,7 @@ What is it?
 
 Python packaging has come `a long way <https://www.bernat.tech/pep-517-518/>`_.
 
-The traditional ``setuptools`` way of packgaging Python modules
+The traditional ``setuptools`` way of packaging Python modules
 uses a ``setup()`` function within the ``setup.py`` script. Commands such as
 ``python setup.py bdist`` or ``python setup.py bdist_wheel`` generate a 
 distribution bundle and ``python setup.py install`` installs the distribution. 
index bce724a79fffbb5cb601c422ba412c87e787a46a..3c477ec114f7c780d47f11dea8ebc860faa60b5f 100644 (file)
@@ -3,9 +3,9 @@
 
 Under normal circumstances, the ``distutils`` assume that you are going to
 build a distribution of your project, not use it in its "raw" or "unbuilt"
-form.  If you were to use the ``distutils`` that way, you would have to rebuild
-and reinstall your project every time you made a change to it during
-development.
+form.  However, if you were to use the ``distutils`` to build a distribution,
+you would have to rebuild and reinstall your project every time you made a
+change to it during development.
 
 Another problem that sometimes comes up with the ``distutils`` is that you may
 need to do development on two related projects at the same time.  You may need
index 99b7f680753deda3929a711e2054af3cf5ec5456..c84f1dd9e89407c3ba7dee18d045bc008fa8b905 100644 (file)
@@ -697,7 +697,8 @@ class WorkingSet:
             keys2.append(dist.key)
         self._added_new(dist)
 
-    def resolve(self, requirements, env=None, installer=None,
+    # FIXME: 'WorkingSet.resolve' is too complex (11)
+    def resolve(self, requirements, env=None, installer=None,  # noqa: C901
                 replace_conflicting=False, extras=None):
         """List all distributions needed to (recursively) meet `requirements`
 
@@ -1746,7 +1747,8 @@ class ZipProvider(EggProvider):
         timestamp = time.mktime(date_time)
         return timestamp, size
 
-    def _extract_resource(self, manager, zip_path):
+    # FIXME: 'ZipProvider._extract_resource' is too complex (12)
+    def _extract_resource(self, manager, zip_path):  # noqa: C901
 
         if zip_path in self._index():
             for name in self._index()[zip_path]:
@@ -1984,7 +1986,7 @@ def find_eggs_in_zip(importer, path_item, only=False):
             dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
             for dist in dists:
                 yield dist
-        elif subitem.lower().endswith('.dist-info'):
+        elif subitem.lower().endswith(('.dist-info', '.egg-info')):
             subpath = os.path.join(path_item, subitem)
             submeta = EggMetadata(zipimport.zipimporter(subpath))
             submeta.egg_info = subpath
@@ -2859,7 +2861,8 @@ class Distribution:
         """Return the EntryPoint object for `group`+`name`, or ``None``"""
         return self.get_entry_map(group).get(name)
 
-    def insert_on(self, path, loc=None, replace=False):
+    # FIXME: 'Distribution.insert_on' is too complex (13)
+    def insert_on(self, path, loc=None, replace=False):  # noqa: C901
         """Ensure self.location is on path
 
         If replace=False (default):
index 4dc3beb2fa59313506e858d540b61b5e89d40abd..1fbb4fcc896a4e3c3eff795c6ed77d2c4e6d0a55 100644 (file)
@@ -54,6 +54,12 @@ class VendorImporter:
                 "distribution.".format(**locals())
             )
 
+    def create_module(self, spec):
+        return self.load_module(spec.name)
+
+    def exec_module(self, module):
+        pass
+
     def install(self):
         """
         Install this importer into sys.meta_path if not already present.
diff --git a/pkg_resources/tests/data/my-test-package-zip/my-test-package.zip b/pkg_resources/tests/data/my-test-package-zip/my-test-package.zip
new file mode 100644 (file)
index 0000000..81f9a01
Binary files /dev/null and b/pkg_resources/tests/data/my-test-package-zip/my-test-package.zip differ
index f9594422f24ec65a699c639a7170cea52a6d26e8..b01b4827a7d2949ba5c7172b6f29378f70c3ad1e 100644 (file)
@@ -32,3 +32,12 @@ class TestFindDistributions:
         assert [dist.project_name for dist in dists] == ['my-test-package']
         dists = pkg_resources.find_distributions(str(target_dir), only=True)
         assert not list(dists)
+
+    def test_zipped_sdist_one_level_removed(self, target_dir):
+        (TESTS_DATA_DIR / 'my-test-package-zip').copy(target_dir)
+        dists = pkg_resources.find_distributions(
+            str(target_dir / "my-test-package.zip"))
+        assert [dist.project_name for dist in dists] == ['my-test-package']
+        dists = pkg_resources.find_distributions(
+            str(target_dir / "my-test-package.zip"), only=True)
+        assert not list(dists)
index 658514d30a66394d7c0336fa5134f8d8125d5b46..0bc2a46f4fbac51eb2e884866fc7b7109bfbf813 100644 (file)
@@ -12,18 +12,16 @@ skip-string-normalization = true
 
 [tool.setuptools_scm]
 
-# jaraco/skeleton#22
-[tool.jaraco.pytest.plugins.black]
+[pytest.enabler.black]
 #addopts = "--black"
 
-# jaraco/skeleton#22
-[tool.jaraco.pytest.plugins.mypy]
+[pytest.enabler.mypy]
 #addopts = "--mypy"
 
-[tool.jaraco.pytest.plugins.flake8]
+[pytest.enabler.flake8]
 addopts = "--flake8"
 
-[tool.jaraco.pytest.plugins.cov]
+[pytest.enabler.cov]
 addopts = "--cov"
 
 [tool.towncrier]
index 451abe835ae1cf4e756d3accb9b57b9b743383cc..43c4f23b6f5408ffbdb1c57335fff741c8884171 100644 (file)
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,7 +1,8 @@
 [metadata]
-license_file = LICENSE
+license_files =
+    LICENSE
 name = setuptools
-version = 51.2.0
+version = 51.3.0
 author = Python Packaging Authority
 author_email = distutils-sig@python.org
 description = Easily download, build, install, upgrade, and uninstall Python packages
@@ -22,7 +23,7 @@ project_urls =
     Documentation = https://setuptools.readthedocs.io/
 
 [options]
-packages = find:
+packages = find_namespace:
 py_modules = easy_install
 # disabled as it causes tests to be included #2505
 # include_package_data = true
@@ -30,7 +31,12 @@ python_requires = >=3.6
 install_requires =
 
 [options.packages.find]
-exclude = *.tests
+exclude =
+        build*
+        docs*
+        tests*
+        *.tests
+        tools*
 
 [options.extras_require]
 testing =
@@ -41,8 +47,7 @@ testing =
        pytest-black >= 0.3.7; python_implementation != "PyPy"
        pytest-cov
        pytest-mypy; python_implementation != "PyPy"
-       # jaraco/skeleton#22
-       jaraco.test >= 3.2.0
+       pytest-enabler
 
        # local
     mock
index 0ce190b8cf7258ef7ac1d9d71d9293038f36d69d..0f70284822f50098e21ad439550cdbd4d298d011 100644 (file)
@@ -125,6 +125,56 @@ def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
                 os.chmod(target, unix_attributes)
 
 
+def _resolve_tar_file_or_dir(tar_obj, tar_member_obj):
+    """Resolve any links and extract link targets as normal files."""
+    while tar_member_obj is not None and (
+            tar_member_obj.islnk() or tar_member_obj.issym()):
+        linkpath = tar_member_obj.linkname
+        if tar_member_obj.issym():
+            base = posixpath.dirname(tar_member_obj.name)
+            linkpath = posixpath.join(base, linkpath)
+            linkpath = posixpath.normpath(linkpath)
+        tar_member_obj = tar_obj._getmember(linkpath)
+
+    is_file_or_dir = (
+        tar_member_obj is not None and
+        (tar_member_obj.isfile() or tar_member_obj.isdir())
+    )
+    if is_file_or_dir:
+        return tar_member_obj
+
+    raise LookupError('Got unknown file type')
+
+
+def _iter_open_tar(tar_obj, extract_dir, progress_filter):
+    """Emit member-destination pairs from a tar archive."""
+    # don't do any chowning!
+    tar_obj.chown = lambda *args: None
+
+    with contextlib.closing(tar_obj):
+        for member in tar_obj:
+            name = member.name
+            # don't extract absolute paths or ones with .. in them
+            if name.startswith('/') or '..' in name.split('/'):
+                continue
+
+            prelim_dst = os.path.join(extract_dir, *name.split('/'))
+
+            try:
+                member = _resolve_tar_file_or_dir(tar_obj, member)
+            except LookupError:
+                continue
+
+            final_dst = progress_filter(name, prelim_dst)
+            if not final_dst:
+                continue
+
+            if final_dst.endswith(os.sep):
+                final_dst = final_dst[:-1]
+
+            yield member, final_dst
+
+
 def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
     """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
 
@@ -138,38 +188,18 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
         raise UnrecognizedFormat(
             "%s is not a compressed or uncompressed tar file" % (filename,)
         ) from e
-    with contextlib.closing(tarobj):
-        # don't do any chowning!
-        tarobj.chown = lambda *args: None
-        for member in tarobj:
-            name = member.name
-            # don't extract absolute paths or ones with .. in them
-            if not name.startswith('/') and '..' not in name.split('/'):
-                prelim_dst = os.path.join(extract_dir, *name.split('/'))
-
-                # resolve any links and to extract the link targets as normal
-                # files
-                while member is not None and (
-                        member.islnk() or member.issym()):
-                    linkpath = member.linkname
-                    if member.issym():
-                        base = posixpath.dirname(member.name)
-                        linkpath = posixpath.join(base, linkpath)
-                        linkpath = posixpath.normpath(linkpath)
-                    member = tarobj._getmember(linkpath)
-
-                if member is not None and (member.isfile() or member.isdir()):
-                    final_dst = progress_filter(name, prelim_dst)
-                    if final_dst:
-                        if final_dst.endswith(os.sep):
-                            final_dst = final_dst[:-1]
-                        try:
-                            # XXX Ugh
-                            tarobj._extract_member(member, final_dst)
-                        except tarfile.ExtractError:
-                            # chown/chmod/mkfifo/mknode/makedev failed
-                            pass
-        return True
+
+    for member, final_dst in _iter_open_tar(
+            tarobj, extract_dir, progress_filter,
+    ):
+        try:
+            # XXX Ugh
+            tarobj._extract_member(member, final_dst)
+        except tarfile.ExtractError:
+            # chown/chmod/mkfifo/mknode/makedev failed
+            pass
+
+    return True
 
 
 extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
index a88efb45b8cfd243e6d9f35648cf0a14425552ad..206f2419ba1f3c96d8329378fe623058d7d4e11f 100644 (file)
@@ -153,7 +153,7 @@ class bdist_egg(Command):
         self.run_command(cmdname)
         return cmd
 
-    def run(self):
+    def run(self):  # noqa: C901  # is too complex (14)  # FIXME
         # Generate metadata first
         self.run_command("egg_info")
         # We run install_lib before install_data, because some data hacks
index 9ec83b7d8bab87b01ec04dbe225d8cba98022767..f1e487d4d21e9e706b3d97d77fcbf09207af2ed7 100644 (file)
@@ -226,7 +226,7 @@ class easy_install(Command):
         print(tmpl.format(**locals()))
         raise SystemExit()
 
-    def finalize_options(self):
+    def finalize_options(self):  # noqa: C901  # is too complex (25)  # FIXME
         self.version and self._render_version()
 
         py_version = sys.version.split()[0]
@@ -437,7 +437,7 @@ class easy_install(Command):
     def warn_deprecated_options(self):
         pass
 
-    def check_site_dir(self):
+    def check_site_dir(self):  # noqa: C901  # is too complex (12)  # FIXME
         """Verify that self.install_dir is .pth-capable dir, if needed"""
 
         instdir = normalize_path(self.install_dir)
@@ -713,7 +713,10 @@ class easy_install(Command):
             if getattr(self, attrname) is None:
                 setattr(self, attrname, scheme[key])
 
-    def process_distribution(self, requirement, dist, deps=True, *info):
+    # FIXME: 'easy_install.process_distribution' is too complex (12)
+    def process_distribution(  # noqa: C901
+            self, requirement, dist, deps=True, *info,
+    ):
         self.update_pth(dist)
         self.package_index.add(dist)
         if dist in self.local_index[dist.key]:
@@ -837,12 +840,19 @@ class easy_install(Command):
 
     def install_eggs(self, spec, dist_filename, tmpdir):
         # .egg dirs or files are already built, so just return them
-        if dist_filename.lower().endswith('.egg'):
-            return [self.install_egg(dist_filename, tmpdir)]
-        elif dist_filename.lower().endswith('.exe'):
-            return [self.install_exe(dist_filename, tmpdir)]
-        elif dist_filename.lower().endswith('.whl'):
-            return [self.install_wheel(dist_filename, tmpdir)]
+        installer_map = {
+            '.egg': self.install_egg,
+            '.exe': self.install_exe,
+            '.whl': self.install_wheel,
+        }
+        try:
+            install_dist = installer_map[
+                dist_filename.lower()[-4:]
+            ]
+        except KeyError:
+            pass
+        else:
+            return [install_dist(dist_filename, tmpdir)]
 
         # Anything else, try to extract and build
         setup_base = tmpdir
@@ -887,7 +897,8 @@ class easy_install(Command):
             metadata = EggMetadata(zipimport.zipimporter(egg_path))
         return Distribution.from_filename(egg_path, metadata=metadata)
 
-    def install_egg(self, egg_path, tmpdir):
+    # FIXME: 'easy_install.install_egg' is too complex (11)
+    def install_egg(self, egg_path, tmpdir):  # noqa: C901
         destination = os.path.join(
             self.install_dir,
             os.path.basename(egg_path),
@@ -986,7 +997,8 @@ class easy_install(Command):
         # install the .egg
         return self.install_egg(egg_path, tmpdir)
 
-    def exe_to_egg(self, dist_filename, egg_tmp):
+    # FIXME: 'easy_install.exe_to_egg' is too complex (12)
+    def exe_to_egg(self, dist_filename, egg_tmp):  # noqa: C901
         """Extract a bdist_wininst to the directories an egg would use"""
         # Check for .pth file and set up prefix translations
         prefixes = get_exe_prefixes(dist_filename)
@@ -1184,16 +1196,18 @@ class easy_install(Command):
         cfg_filename = os.path.join(base, 'setup.cfg')
         setopt.edit_config(cfg_filename, settings)
 
-    def update_pth(self, dist):
+    def update_pth(self, dist):  # noqa: C901  # is too complex (11)  # FIXME
         if self.pth_file is None:
             return
 
         for d in self.pth_file[dist.key]:  # drop old entries
-            if self.multi_version or d.location != dist.location:
-                log.info("Removing %s from easy-install.pth file", d)
-                self.pth_file.remove(d)
-                if d.location in self.shadow_path:
-                    self.shadow_path.remove(d.location)
+            if not self.multi_version and d.location == dist.location:
+                continue
+
+            log.info("Removing %s from easy-install.pth file", d)
+            self.pth_file.remove(d)
+            if d.location in self.shadow_path:
+                self.shadow_path.remove(d.location)
 
         if not self.multi_version:
             if dist.location in self.pth_file.paths:
@@ -1207,19 +1221,21 @@ class easy_install(Command):
                 if dist.location not in self.shadow_path:
                     self.shadow_path.append(dist.location)
 
-        if not self.dry_run:
+        if self.dry_run:
+            return
 
-            self.pth_file.save()
+        self.pth_file.save()
 
-            if dist.key == 'setuptools':
-                # Ensure that setuptools itself never becomes unavailable!
-                # XXX should this check for latest version?
-                filename = os.path.join(self.install_dir, 'setuptools.pth')
-                if os.path.islink(filename):
-                    os.unlink(filename)
-                f = open(filename, 'wt')
-                f.write(self.pth_file.make_relative(dist.location) + '\n')
-                f.close()
+        if dist.key != 'setuptools':
+            return
+
+        # Ensure that setuptools itself never becomes unavailable!
+        # XXX should this check for latest version?
+        filename = os.path.join(self.install_dir, 'setuptools.pth')
+        if os.path.islink(filename):
+            os.unlink(filename)
+        with open(filename, 'wt') as f:
+            f.write(self.pth_file.make_relative(dist.location) + '\n')
 
     def unpack_progress(self, src, dst):
         # Progress filter for unpacking
@@ -1360,58 +1376,63 @@ def get_site_dirs():
     if sys.exec_prefix != sys.prefix:
         prefixes.append(sys.exec_prefix)
     for prefix in prefixes:
-        if prefix:
-            if sys.platform in ('os2emx', 'riscos'):
-                sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
-            elif os.sep == '/':
-                sitedirs.extend([
-                    os.path.join(
-                        prefix,
-                        "lib",
-                        "python{}.{}".format(*sys.version_info),
-                        "site-packages",
-                    ),
-                    os.path.join(prefix, "lib", "site-python"),
-                ])
-            else:
-                sitedirs.extend([
+        if not prefix:
+            continue
+
+        if sys.platform in ('os2emx', 'riscos'):
+            sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
+        elif os.sep == '/':
+            sitedirs.extend([
+                os.path.join(
                     prefix,
-                    os.path.join(prefix, "lib", "site-packages"),
-                ])
-            if sys.platform == 'darwin':
-                # for framework builds *only* we add the standard Apple
-                # locations. Currently only per-user, but /Library and
-                # /Network/Library could be added too
-                if 'Python.framework' in prefix:
-                    home = os.environ.get('HOME')
-                    if home:
-                        home_sp = os.path.join(
-                            home,
-                            'Library',
-                            'Python',
-                            '{}.{}'.format(*sys.version_info),
-                            'site-packages',
-                        )
-                        sitedirs.append(home_sp)
+                    "lib",
+                    "python{}.{}".format(*sys.version_info),
+                    "site-packages",
+                ),
+                os.path.join(prefix, "lib", "site-python"),
+            ])
+        else:
+            sitedirs.extend([
+                prefix,
+                os.path.join(prefix, "lib", "site-packages"),
+            ])
+        if sys.platform != 'darwin':
+            continue
+
+        # for framework builds *only* we add the standard Apple
+        # locations. Currently only per-user, but /Library and
+        # /Network/Library could be added too
+        if 'Python.framework' not in prefix:
+            continue
+
+        home = os.environ.get('HOME')
+        if not home:
+            continue
+
+        home_sp = os.path.join(
+            home,
+            'Library',
+            'Python',
+            '{}.{}'.format(*sys.version_info),
+            'site-packages',
+        )
+        sitedirs.append(home_sp)
     lib_paths = get_path('purelib'), get_path('platlib')
-    for site_lib in lib_paths:
-        if site_lib not in sitedirs:
-            sitedirs.append(site_lib)
+
+    sitedirs.extend(s for s in lib_paths if s not in sitedirs)
 
     if site.ENABLE_USER_SITE:
         sitedirs.append(site.USER_SITE)
 
-    try:
+    with contextlib.suppress(AttributeError):
         sitedirs.extend(site.getsitepackages())
-    except AttributeError:
-        pass
 
     sitedirs = list(map(normalize_path, sitedirs))
 
     return sitedirs
 
 
-def expand_paths(inputs):
+def expand_paths(inputs):  # noqa: C901  # is too complex (11)  # FIXME
     """Yield sys.path directories that might contain "old-style" packages"""
 
     seen = {}
@@ -1443,13 +1464,18 @@ def expand_paths(inputs):
 
             # Yield existing non-dupe, non-import directory lines from it
             for line in lines:
-                if not line.startswith("import"):
-                    line = normalize_path(line.rstrip())
-                    if line not in seen:
-                        seen[line] = 1
-                        if not os.path.isdir(line):
-                            continue
-                        yield line, os.listdir(line)
+                if line.startswith("import"):
+                    continue
+
+                line = normalize_path(line.rstrip())
+                if line in seen:
+                    continue
+
+                seen[line] = 1
+                if not os.path.isdir(line):
+                    continue
+
+                yield line, os.listdir(line)
 
 
 def extract_wininst_cfg(dist_filename):
index 0b7ad677f2ad4fe8bf82e22c013452171097911d..bb472036022d8e7feaf6d2f87940e7d3e7c7697f 100644 (file)
@@ -8,6 +8,7 @@ from distutils.util import convert_path
 from distutils import log
 import distutils.errors
 import distutils.filelist
+import functools
 import os
 import re
 import sys
@@ -31,7 +32,7 @@ from setuptools.extern import packaging
 from setuptools import SetuptoolsDeprecationWarning
 
 
-def translate_pattern(glob):
+def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
     """
     Translate a file path glob like '*.txt' in to a regular expression.
     This differs from fnmatch.translate which allows wildcards to match
@@ -130,10 +131,12 @@ class InfoCommon:
         egg_info may be called more than once for a distribution,
         in which case the version string already contains all tags.
         """
-        return (
-            version if self.vtags and version.endswith(self.vtags)
-            else version + self.vtags
-        )
+        # Remove the tags if they exist. The tags maybe have been normalized
+        # (e.g. turning .dev into .dev0) so we can't just compare strings
+        base_version = parse_version(version).base_version
+
+        # Add the tags
+        return base_version + self.vtags
 
     def tags(self):
         version = ''
@@ -332,70 +335,74 @@ class FileList(_FileList):
         # patterns, (dir and patterns), or (dir_pattern).
         (action, patterns, dir, dir_pattern) = self._parse_template_line(line)
 
+        action_map = {
+            'include': self.include,
+            'exclude': self.exclude,
+            'global-include': self.global_include,
+            'global-exclude': self.global_exclude,
+            'recursive-include': functools.partial(
+                self.recursive_include, dir,
+            ),
+            'recursive-exclude': functools.partial(
+                self.recursive_exclude, dir,
+            ),
+            'graft': self.graft,
+            'prune': self.prune,
+        }
+        log_map = {
+            'include': "warning: no files found matching '%s'",
+            'exclude': (
+                "warning: no previously-included files found "
+                "matching '%s'"
+            ),
+            'global-include': (
+                "warning: no files found matching '%s' "
+                "anywhere in distribution"
+            ),
+            'global-exclude': (
+                "warning: no previously-included files matching "
+                "'%s' found anywhere in distribution"
+            ),
+            'recursive-include': (
+                "warning: no files found matching '%s' "
+                "under directory '%s'"
+            ),
+            'recursive-exclude': (
+                "warning: no previously-included files matching "
+                "'%s' found under directory '%s'"
+            ),
+            'graft': "warning: no directories found matching '%s'",
+            'prune': "no previously-included directories found matching '%s'",
+        }
+
+        try:
+            process_action = action_map[action]
+        except KeyError:
+            raise DistutilsInternalError(
+                "this cannot happen: invalid action '{action!s}'".
+                format(action=action),
+            )
+
         # OK, now we know that the action is valid and we have the
         # right number of words on the line for that action -- so we
         # can proceed with minimal error-checking.
-        if action == 'include':
-            self.debug_print("include " + ' '.join(patterns))
-            for pattern in patterns:
-                if not self.include(pattern):
-                    log.warn("warning: no files found matching '%s'", pattern)
-
-        elif action == 'exclude':
-            self.debug_print("exclude " + ' '.join(patterns))
-            for pattern in patterns:
-                if not self.exclude(pattern):
-                    log.warn(("warning: no previously-included files "
-                              "found matching '%s'"), pattern)
-
-        elif action == 'global-include':
-            self.debug_print("global-include " + ' '.join(patterns))
-            for pattern in patterns:
-                if not self.global_include(pattern):
-                    log.warn(("warning: no files found matching '%s' "
-                              "anywhere in distribution"), pattern)
-
-        elif action == 'global-exclude':
-            self.debug_print("global-exclude " + ' '.join(patterns))
-            for pattern in patterns:
-                if not self.global_exclude(pattern):
-                    log.warn(("warning: no previously-included files matching "
-                              "'%s' found anywhere in distribution"),
-                             pattern)
-
-        elif action == 'recursive-include':
-            self.debug_print("recursive-include %s %s" %
-                             (dir, ' '.join(patterns)))
-            for pattern in patterns:
-                if not self.recursive_include(dir, pattern):
-                    log.warn(("warning: no files found matching '%s' "
-                              "under directory '%s'"),
-                             pattern, dir)
-
-        elif action == 'recursive-exclude':
-            self.debug_print("recursive-exclude %s %s" %
-                             (dir, ' '.join(patterns)))
-            for pattern in patterns:
-                if not self.recursive_exclude(dir, pattern):
-                    log.warn(("warning: no previously-included files matching "
-                              "'%s' found under directory '%s'"),
-                             pattern, dir)
-
-        elif action == 'graft':
-            self.debug_print("graft " + dir_pattern)
-            if not self.graft(dir_pattern):
-                log.warn("warning: no directories found matching '%s'",
-                         dir_pattern)
-
-        elif action == 'prune':
-            self.debug_print("prune " + dir_pattern)
-            if not self.prune(dir_pattern):
-                log.warn(("no previously-included directories found "
-                          "matching '%s'"), dir_pattern)
-
-        else:
-            raise DistutilsInternalError(
-                "this cannot happen: invalid action '%s'" % action)
+
+        action_is_recursive = action.startswith('recursive-')
+        if action in {'graft', 'prune'}:
+            patterns = [dir_pattern]
+        extra_log_args = (dir, ) if action_is_recursive else ()
+        log_tmpl = log_map[action]
+
+        self.debug_print(
+            ' '.join(
+                [action] +
+                ([dir] if action_is_recursive else []) +
+                patterns,
+            )
+        )
+        for pattern in patterns:
+            if not process_action(pattern):
+                log.warn(log_tmpl, pattern, *extra_log_args)
 
     def _remove_files(self, predicate):
         """
index 2c088ef8cb42a53e824fb1880614308a1fc84d1f..2d0aac333d35fc2c5650b6426349799986414553 100644 (file)
@@ -118,8 +118,15 @@ def read_pkg_file(self, file):
         self.obsoletes = None
 
 
+def single_line(val):
+    # quick and dirty validation for description pypa/setuptools#1390
+    if '\n' in val:
+        raise ValueError("newlines not allowed")
+    return val
+
+
 # Based on Python 3.5 version
-def write_pkg_file(self, file):
+def write_pkg_file(self, file):  # noqa: C901  # is too complex (14)  # FIXME
     """Write the PKG-INFO format data to a file object.
     """
     version = self.get_metadata_version()
@@ -130,7 +137,7 @@ def write_pkg_file(self, file):
     write_field('Metadata-Version', str(version))
     write_field('Name', self.get_name())
     write_field('Version', self.get_version())
-    write_field('Summary', self.get_description())
+    write_field('Summary', single_line(self.get_description()))
     write_field('Home-page', self.get_url())
 
     if version < StrictVersion('1.2'):
@@ -548,7 +555,8 @@ class Distribution(_Distribution):
         req.marker = None
         return req
 
-    def _parse_config_files(self, filenames=None):
+    # FIXME: 'Distribution._parse_config_files' is too complex (14)
+    def _parse_config_files(self, filenames=None):  # noqa: C901
         """
         Adapted from distutils.dist.Distribution.parse_config_files,
         this method provides the same functionality in subtly-improved
@@ -557,14 +565,12 @@ class Distribution(_Distribution):
         from configparser import ConfigParser
 
         # Ignore install directory options if we have a venv
-        if sys.prefix != sys.base_prefix:
-            ignore_options = [
-                'install-base', 'install-platbase', 'install-lib',
-                'install-platlib', 'install-purelib', 'install-headers',
-                'install-scripts', 'install-data', 'prefix', 'exec-prefix',
-                'home', 'user', 'root']
-        else:
-            ignore_options = []
+        ignore_options = [] if sys.prefix == sys.base_prefix else [
+            'install-base', 'install-platbase', 'install-lib',
+            'install-platlib', 'install-purelib', 'install-headers',
+            'install-scripts', 'install-data', 'prefix', 'exec-prefix',
+            'home', 'user', 'root',
+        ]
 
         ignore_options = frozenset(ignore_options)
 
@@ -585,32 +591,37 @@ class Distribution(_Distribution):
                 opt_dict = self.get_option_dict(section)
 
                 for opt in options:
-                    if opt != '__name__' and opt not in ignore_options:
-                        val = parser.get(section, opt)
-                        opt = opt.replace('-', '_')
-                        opt_dict[opt] = (filename, val)
+                    if opt == '__name__' or opt in ignore_options:
+                        continue
+
+                    val = parser.get(section, opt)
+                    opt = opt.replace('-', '_')
+                    opt_dict[opt] = (filename, val)
 
             # Make the ConfigParser forget everything (so we retain
             # the original filenames that options come from)
             parser.__init__()
 
+        if 'global' not in self.command_options:
+            return
+
         # If there was a "global" section in the config file, use it
         # to set Distribution options.
 
-        if 'global' in self.command_options:
-            for (opt, (src, val)) in self.command_options['global'].items():
-                alias = self.negative_opt.get(opt)
-                try:
-                    if alias:
-                        setattr(self, alias, not strtobool(val))
-                    elif opt in ('verbose', 'dry_run'):  # ugh!
-                        setattr(self, opt, strtobool(val))
-                    else:
-                        setattr(self, opt, val)
-                except ValueError as e:
-                    raise DistutilsOptionError(e) from e
+        for (opt, (src, val)) in self.command_options['global'].items():
+            alias = self.negative_opt.get(opt)
+            if alias:
+                val = not strtobool(val)
+            elif opt in ('verbose', 'dry_run'):  # ugh!
+                val = strtobool(val)
+
+            try:
+                setattr(self, alias or opt, val)
+            except ValueError as e:
+                raise DistutilsOptionError(e) from e
 
-    def _set_command_options(self, command_obj, option_dict=None):
+    # FIXME: 'Distribution._set_command_options' is too complex (14)
+    def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
         """
         Set the options for 'command_obj' from 'option_dict'.  Basically
         this means copying elements of a dictionary ('option_dict') to
index b7f30dc2e38705618aad423bf862aeb7f17d9a87..399701a044e31b9677aae265ac04b08ef9334302 100644 (file)
@@ -54,6 +54,12 @@ class VendorImporter:
                 "distribution.".format(**locals())
             )
 
+    def create_module(self, spec):
+        return self.load_module(spec.name)
+
+    def exec_module(self, module):
+        pass
+
     def install(self):
         """
         Install this importer into sys.meta_path if not already present.
index 9d7cbc5da68da8605d271b9314befb206b87bca6..87062b8187fa4f74a8c4edbaa60bd9a8b2d506a4 100644 (file)
@@ -47,6 +47,8 @@ def iglob(pathname, recursive=False):
 
 def _iglob(pathname, recursive):
     dirname, basename = os.path.split(pathname)
+    glob_in_dir = glob2 if recursive and _isrecursive(basename) else glob1
+
     if not has_magic(pathname):
         if basename:
             if os.path.lexists(pathname):
@@ -56,13 +58,9 @@ def _iglob(pathname, recursive):
             if os.path.isdir(dirname):
                 yield pathname
         return
+
     if not dirname:
-        if recursive and _isrecursive(basename):
-            for x in glob2(dirname, basename):
-                yield x
-        else:
-            for x in glob1(dirname, basename):
-                yield x
+        yield from glob_in_dir(dirname, basename)
         return
     # `os.path.split()` returns the argument itself as a dirname if it is a
     # drive or UNC path.  Prevent an infinite recursion if a drive or UNC path
@@ -71,12 +69,7 @@ def _iglob(pathname, recursive):
         dirs = _iglob(dirname, recursive)
     else:
         dirs = [dirname]
-    if has_magic(basename):
-        if recursive and _isrecursive(basename):
-            glob_in_dir = glob2
-        else:
-            glob_in_dir = glob1
-    else:
+    if not has_magic(basename):
         glob_in_dir = glob0
     for dirname in dirs:
         for name in glob_in_dir(dirname, basename):
index e630b87479754867d5aaf4515a649fb50eadb80d..c5822a31f4d0bba506dc5df46dd787341fea3577 100644 (file)
@@ -51,7 +51,7 @@ def _legacy_fetch_build_egg(dist, req):
     return cmd.easy_install(req)
 
 
-def fetch_build_egg(dist, req):
+def fetch_build_egg(dist, req):  # noqa: C901  # is too complex (16)  # FIXME
     """Fetch an egg needed for building.
 
     Use pip/wheel to fetch/build a wheel."""
@@ -80,20 +80,17 @@ def fetch_build_egg(dist, req):
     if 'allow_hosts' in opts:
         raise DistutilsError('the `allow-hosts` option is not supported '
                              'when using pip to install requirements.')
-    if 'PIP_QUIET' in os.environ or 'PIP_VERBOSE' in os.environ:
-        quiet = False
-    else:
-        quiet = True
+    quiet = 'PIP_QUIET' not in os.environ and 'PIP_VERBOSE' not in os.environ
     if 'PIP_INDEX_URL' in os.environ:
         index_url = None
     elif 'index_url' in opts:
         index_url = opts['index_url'][1]
     else:
         index_url = None
-    if 'find_links' in opts:
-        find_links = _fixup_find_links(opts['find_links'][1])[:]
-    else:
-        find_links = []
+    find_links = (
+        _fixup_find_links(opts['find_links'][1])[:] if 'find_links' in opts
+        else []
+    )
     if dist.dependency_links:
         find_links.extend(dist.dependency_links)
     eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
@@ -112,16 +109,12 @@ def fetch_build_egg(dist, req):
             cmd.append('--quiet')
         if index_url is not None:
             cmd.extend(('--index-url', index_url))
-        if find_links is not None:
-            for link in find_links:
-                cmd.extend(('--find-links', link))
+        for link in find_links or []:
+            cmd.extend(('--find-links', link))
         # If requirement is a PEP 508 direct URL, directly pass
         # the URL to pip, as `req @ url` does not work on the
         # command line.
-        if req.url:
-            cmd.append(req.url)
-        else:
-            cmd.append(str(req))
+        cmd.append(req.url or str(req))
         try:
             subprocess.check_call(cmd)
         except subprocess.CalledProcessError as e:
index 1ead72b421befb112f2808a8478948ee3a3cc89d..d5e0a9523259aa88eb95f84f2d63c3259a7ffd2f 100644 (file)
@@ -24,6 +24,7 @@ from io import open
 from os import listdir, pathsep
 from os.path import join, isfile, isdir, dirname
 import sys
+import contextlib
 import platform
 import itertools
 import subprocess
@@ -724,28 +725,23 @@ class SystemInfo:
         ms = self.ri.microsoft
         vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs)
         vs_vers = []
-        for hkey in self.ri.HKEYS:
-            for key in vckeys:
-                try:
-                    bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
-                except (OSError, IOError):
-                    continue
-                with bkey:
-                    subkeys, values, _ = winreg.QueryInfoKey(bkey)
-                    for i in range(values):
-                        try:
-                            ver = float(winreg.EnumValue(bkey, i)[0])
-                            if ver not in vs_vers:
-                                vs_vers.append(ver)
-                        except ValueError:
-                            pass
-                    for i in range(subkeys):
-                        try:
-                            ver = float(winreg.EnumKey(bkey, i))
-                            if ver not in vs_vers:
-                                vs_vers.append(ver)
-                        except ValueError:
-                            pass
+        for hkey, key in itertools.product(self.ri.HKEYS, vckeys):
+            try:
+                bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
+            except (OSError, IOError):
+                continue
+            with bkey:
+                subkeys, values, _ = winreg.QueryInfoKey(bkey)
+                for i in range(values):
+                    with contextlib.suppress(ValueError):
+                        ver = float(winreg.EnumValue(bkey, i)[0])
+                        if ver not in vs_vers:
+                            vs_vers.append(ver)
+                for i in range(subkeys):
+                    with contextlib.suppress(ValueError):
+                        ver = float(winreg.EnumKey(bkey, i))
+                        if ver not in vs_vers:
+                            vs_vers.append(ver)
         return sorted(vs_vers)
 
     def find_programdata_vs_vers(self):
@@ -925,8 +921,8 @@ class SystemInfo:
         """
         return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib'))
 
-    @property
-    def WindowsSdkDir(self):
+    @property  # noqa: C901
+    def WindowsSdkDir(self):  # noqa: C901  # is too complex (12)  # FIXME
         """
         Microsoft Windows SDK directory.
 
index 3979b131b566f37da370150dd0c0773c7f1c7ed6..123e9582b5d39ce86314fa2d1fc85ee2e8a2cf25 100644 (file)
@@ -320,7 +320,8 @@ class PackageIndex(Environment):
         else:
             self.opener = urllib.request.urlopen
 
-    def process_url(self, url, retrieve=False):
+    # FIXME: 'PackageIndex.process_url' is too complex (14)
+    def process_url(self, url, retrieve=False):  # noqa: C901
         """Evaluate a URL as a possible download, and maybe retrieve it"""
         if url in self.scanned_urls and not retrieve:
             return
@@ -428,49 +429,53 @@ class PackageIndex(Environment):
             dist.precedence = SOURCE_DIST
             self.add(dist)
 
+    def _scan(self, link):
+        # Process a URL to see if it's for a package page
+        NO_MATCH_SENTINEL = None, None
+        if not link.startswith(self.index_url):
+            return NO_MATCH_SENTINEL
+
+        parts = list(map(
+            urllib.parse.unquote, link[len(self.index_url):].split('/')
+        ))
+        if len(parts) != 2 or '#' in parts[1]:
+            return NO_MATCH_SENTINEL
+
+        # it's a package page, sanitize and index it
+        pkg = safe_name(parts[0])
+        ver = safe_version(parts[1])
+        self.package_pages.setdefault(pkg.lower(), {})[link] = True
+        return to_filename(pkg), to_filename(ver)
+
     def process_index(self, url, page):
         """Process the contents of a PyPI page"""
 
-        def scan(link):
-            # Process a URL to see if it's for a package page
-            if link.startswith(self.index_url):
-                parts = list(map(
-                    urllib.parse.unquote, link[len(self.index_url):].split('/')
-                ))
-                if len(parts) == 2 and '#' not in parts[1]:
-                    # it's a package page, sanitize and index it
-                    pkg = safe_name(parts[0])
-                    ver = safe_version(parts[1])
-                    self.package_pages.setdefault(pkg.lower(), {})[link] = True
-                    return to_filename(pkg), to_filename(ver)
-            return None, None
-
         # process an index page into the package-page index
         for match in HREF.finditer(page):
             try:
-                scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
+                self._scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
             except ValueError:
                 pass
 
-        pkg, ver = scan(url)  # ensure this page is in the page index
-        if pkg:
-            # process individual package page
-            for new_url in find_external_links(url, page):
-                # Process the found URL
-                base, frag = egg_info_for_url(new_url)
-                if base.endswith('.py') and not frag:
-                    if ver:
-                        new_url += '#egg=%s-%s' % (pkg, ver)
-                    else:
-                        self.need_version_info(url)
-                self.scan_url(new_url)
-
-            return PYPI_MD5.sub(
-                lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
-            )
-        else:
+        pkg, ver = self._scan(url)  # ensure this page is in the page index
+        if not pkg:
             return ""  # no sense double-scanning non-package pages
 
+        # process individual package page
+        for new_url in find_external_links(url, page):
+            # Process the found URL
+            base, frag = egg_info_for_url(new_url)
+            if base.endswith('.py') and not frag:
+                if ver:
+                    new_url += '#egg=%s-%s' % (pkg, ver)
+                else:
+                    self.need_version_info(url)
+            self.scan_url(new_url)
+
+        return PYPI_MD5.sub(
+            lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
+        )
+
     def need_version_info(self, url):
         self.scan_all(
             "Page at %s links to .py file(s) without version info; an index "
@@ -591,7 +596,7 @@ class PackageIndex(Environment):
                 spec = parse_requirement_arg(spec)
         return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)
 
-    def fetch_distribution(
+    def fetch_distribution(  # noqa: C901  # is too complex (14)  # FIXME
             self, requirement, tmpdir, force_scan=False, source=False,
             develop_ok=False, local_index=None):
         """Obtain a distribution suitable for fulfilling `requirement`
@@ -762,7 +767,8 @@ class PackageIndex(Environment):
     def reporthook(self, url, filename, blocknum, blksize, size):
         pass  # no-op
 
-    def open_url(self, url, warning=None):
+    # FIXME:
+    def open_url(self, url, warning=None):  # noqa: C901  # is too complex (12)
         if url.startswith('file:'):
             return local_open(url)
         try:
index eac5e6560839876ded2674351db449234679cbbc..b58cca37c93c20d19ec8db2cd854f1da01f04d63 100644 (file)
@@ -56,7 +56,7 @@ if not CertificateError:
         pass
 
 
-if not match_hostname:
+if not match_hostname:  # noqa: C901  # 'If 59' is too complex (21)  # FIXME
 
     def _dnsname_match(dn, hostname, max_wildcards=1):
         """Matching according to RFC 6125, section 6.4.3
index 1047468b185a13ad3ad3c9570c6ec783fcd6f293..1d0f07e30671ffa555ba19d06d98bc60afd54da8 100644 (file)
@@ -6,6 +6,7 @@ import re
 import stat
 import time
 
+from setuptools.build_meta import prepare_metadata_for_build_wheel
 from setuptools.command.egg_info import (
     egg_info, manifest_maker, EggInfoDeprecationWarning, get_pkg_info_revision,
 )
@@ -19,6 +20,26 @@ from .textwrap import DALS
 from . import contexts
 
 
+def _run_egg_info_command(tmpdir_cwd, env, cmd=None, output=None):
+    environ = os.environ.copy()
+    environ.update(HOME=env.paths['home'])
+
+    if cmd is None:
+        cmd = [
+            'egg_info',
+        ]
+    code, data = environment.run_setup_py(
+        cmd=cmd,
+        pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
+        data_stream=1,
+        env=environ,
+    )
+    assert not code, data
+
+    if output:
+        assert output in data
+
+
 class Environment(str):
     pass
 
@@ -132,7 +153,7 @@ class TestEggInfo:
     def test_expected_files_produced(self, tmpdir_cwd, env):
         self._create_project()
 
-        self._run_egg_info_command(tmpdir_cwd, env)
+        _run_egg_info_command(tmpdir_cwd, env)
         actual = os.listdir('foo.egg-info')
 
         expected = [
@@ -166,7 +187,7 @@ class TestEggInfo:
         # currently configured to use a subprocess, the actual traceback
         # object is lost and we need to parse it from stderr
         with pytest.raises(AssertionError) as exc:
-            self._run_egg_info_command(tmpdir_cwd, env)
+            _run_egg_info_command(tmpdir_cwd, env)
 
         # Hopefully this is not too fragile: the only argument to the
         # assertion error should be a traceback, ending with:
@@ -180,13 +201,13 @@ class TestEggInfo:
         """Ensure timestamps are updated when the command is re-run."""
         self._create_project()
 
-        self._run_egg_info_command(tmpdir_cwd, env)
+        _run_egg_info_command(tmpdir_cwd, env)
         timestamp_a = os.path.getmtime('foo.egg-info')
 
         # arbitrary sleep just to handle *really* fast systems
         time.sleep(.001)
 
-        self._run_egg_info_command(tmpdir_cwd, env)
+        _run_egg_info_command(tmpdir_cwd, env)
         timestamp_b = os.path.getmtime('foo.egg-info')
 
         assert timestamp_a != timestamp_b
@@ -201,7 +222,7 @@ class TestEggInfo:
                 'usage.rst': "Run 'hi'",
             }
         })
-        self._run_egg_info_command(tmpdir_cwd, env)
+        _run_egg_info_command(tmpdir_cwd, env)
         egg_info_dir = os.path.join('.', 'foo.egg-info')
         sources_txt = os.path.join(egg_info_dir, 'SOURCES.txt')
         with open(sources_txt) as f:
@@ -441,7 +462,7 @@ class TestEggInfo:
             self, tmpdir_cwd, env, requires, use_setup_cfg,
             expected_requires, install_cmd_kwargs):
         self._setup_script_with_requires(requires, use_setup_cfg)
-        self._run_egg_info_command(tmpdir_cwd, env, **install_cmd_kwargs)
+        _run_egg_info_command(tmpdir_cwd, env, **install_cmd_kwargs)
         egg_info_dir = os.path.join('.', 'foo.egg-info')
         requires_txt = os.path.join(egg_info_dir, 'requires.txt')
         if os.path.exists(requires_txt):
@@ -461,14 +482,14 @@ class TestEggInfo:
         req = 'install_requires={"fake-factory==0.5.2", "pytz"}'
         self._setup_script_with_requires(req)
         with pytest.raises(AssertionError):
-            self._run_egg_info_command(tmpdir_cwd, env)
+            _run_egg_info_command(tmpdir_cwd, env)
 
     def test_extras_require_with_invalid_marker(self, tmpdir_cwd, env):
         tmpl = 'extras_require={{":{marker}": ["barbazquux"]}},'
         req = tmpl.format(marker=self.invalid_marker)
         self._setup_script_with_requires(req)
         with pytest.raises(AssertionError):
-            self._run_egg_info_command(tmpdir_cwd, env)
+            _run_egg_info_command(tmpdir_cwd, env)
         assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == []
 
     def test_extras_require_with_invalid_marker_in_req(self, tmpdir_cwd, env):
@@ -476,7 +497,7 @@ class TestEggInfo:
         req = tmpl.format(marker=self.invalid_marker)
         self._setup_script_with_requires(req)
         with pytest.raises(AssertionError):
-            self._run_egg_info_command(tmpdir_cwd, env)
+            _run_egg_info_command(tmpdir_cwd, env)
         assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == []
 
     def test_provides_extra(self, tmpdir_cwd, env):
@@ -865,26 +886,22 @@ class TestEggInfo:
             sources = f.read().split('\n')
             assert 'setup.py' in sources
 
-    def _run_egg_info_command(self, tmpdir_cwd, env, cmd=None, output=None):
-        environ = os.environ.copy().update(
-            HOME=env.paths['home'],
-        )
-        if cmd is None:
-            cmd = [
-                'egg_info',
-            ]
-        code, data = environment.run_setup_py(
-            cmd=cmd,
-            pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]),
-            data_stream=1,
-            env=environ,
-        )
-        assert not code, data
-
-        if output:
-            assert output in data
-
-    def test_egg_info_tag_only_once(self, tmpdir_cwd, env):
+    @pytest.mark.parametrize(
+        ('make_metadata_path', 'run_command'),
+        [
+            (
+                lambda env: os.path.join('.', 'foo.egg-info', 'PKG-INFO'),
+                lambda tmpdir_cwd, env: _run_egg_info_command(tmpdir_cwd, env)
+            ),
+            (
+                lambda env: os.path.join(env, 'foo.dist-info', 'METADATA'),
+                lambda tmpdir_cwd, env: prepare_metadata_for_build_wheel(env)
+            )
+        ]
+    )
+    def test_egg_info_tag_only_once(
+            self, tmpdir_cwd, env, make_metadata_path, run_command
+    ):
         self._create_project()
         build_files({
             'setup.cfg': DALS("""
@@ -894,11 +911,10 @@ class TestEggInfo:
                               tag_svn_revision = 0
                               """),
         })
-        self._run_egg_info_command(tmpdir_cwd, env)
-        egg_info_dir = os.path.join('.', 'foo.egg-info')
-        with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file:
-            pkg_info_lines = pkginfo_file.read().split('\n')
-        assert 'Version: 0.0.0.dev0' in pkg_info_lines
+        run_command(tmpdir_cwd, env)
+        with open(make_metadata_path(env)) as metadata_file:
+            metadata_lines = metadata_file.read().split('\n')
+        assert 'Version: 0.0.0.dev0' in metadata_lines
 
     def test_get_pkg_info_revision_deprecated(self):
         pytest.warns(EggInfoDeprecationWarning, get_pkg_info_revision)
index dd8ec014cdb3598fe699ce3fcbabeb1d92e635ef..0938f8920d6e12cb78a2ba4edc3e28a8c5bf59b8 100644 (file)
@@ -138,6 +138,8 @@ Features include:
 - test against multiple Python versions
 - run on late (and updated) platform versions
 - automated releases of tagged commits
+- [automatic merging of PRs](https://github.com/marketplace/actions/merge-pull-requests) (requires [protecting branches with required status checks](https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/enabling-required-status-checks), [not possible through API](https://github.community/t/set-all-status-checks-to-be-required-as-branch-protection-using-the-github-api/119493))
+
 
 ### Continuous Deployments
 
diff --git a/tox.ini b/tox.ini
index 9680ec01008946d72db105632f19c1e60acfa179..8083d8c27b46082eb2672d2e1606462d9c4d9302 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -54,7 +54,7 @@ commands =
 [testenv:release]
 skip_install = True
 deps =
-       pep517>=0.5
+       build
        twine[keyring]>=1.13
        path
        jaraco.develop>=7.1
@@ -70,7 +70,7 @@ commands =
        python -c "import path; path.Path('dist').rmtree_p()"
        # unset tag_build and tag_date pypa/setuptools#2500
        python setup.py egg_info -Db "" saveopts
-       python -m pep517.build .
+       python -m build
        python -m twine upload dist/*
        python -m jaraco.develop.create-github-release
        python -m jaraco.tidelift.publish-release-notes