Imported Upstream version 42.0.0 upstream/42.0.0
authorDongHun Kwak <dh0128.kwak@samsung.com>
Tue, 29 Dec 2020 22:04:29 +0000 (07:04 +0900)
committerDongHun Kwak <dh0128.kwak@samsung.com>
Tue, 29 Dec 2020 22:04:29 +0000 (07:04 +0900)
52 files changed:
.bumpversion.cfg
CHANGES.rst
docs/setuptools.txt
pkg_resources/tests/data/my-test-package-source/setup.cfg [new file with mode: 0644]
pkg_resources/tests/data/my-test-package-source/setup.py [new file with mode: 0644]
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/PKG-INFO [new file with mode: 0644]
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt [new file with mode: 0644]
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt [new file with mode: 0644]
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/top_level.txt [new file with mode: 0644]
pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe [new file with mode: 0644]
pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg [new file with mode: 0644]
pkg_resources/tests/test_find_distributions.py
pytest.ini
setup.cfg
setup.py
setuptools/_vendor/packaging/__about__.py
setuptools/_vendor/packaging/__init__.py
setuptools/_vendor/packaging/_compat.py
setuptools/_vendor/packaging/_structures.py
setuptools/_vendor/packaging/markers.py
setuptools/_vendor/packaging/requirements.py
setuptools/_vendor/packaging/specifiers.py
setuptools/_vendor/packaging/tags.py [new file with mode: 0644]
setuptools/_vendor/packaging/utils.py
setuptools/_vendor/packaging/version.py
setuptools/_vendor/vendored.txt
setuptools/command/__init__.py
setuptools/command/easy_install.py
setuptools/command/install.py
setuptools/command/register.py
setuptools/command/sdist.py
setuptools/command/upload.py
setuptools/config.py
setuptools/dist.py
setuptools/errors.py [new file with mode: 0644]
setuptools/glibc.py [deleted file]
setuptools/installer.py [new file with mode: 0644]
setuptools/pep425tags.py [deleted file]
setuptools/tests/server.py
setuptools/tests/test_easy_install.py
setuptools/tests/test_egg_info.py
setuptools/tests/test_glibc.py [deleted file]
setuptools/tests/test_integration.py
setuptools/tests/test_pep425tags.py [deleted file]
setuptools/tests/test_register.py
setuptools/tests/test_upload.py
setuptools/tests/test_virtualenv.py
setuptools/tests/test_wheel.py
setuptools/wheel.py
tests/requirements.txt
tools/tox_pip.py [new file with mode: 0644]
tox.ini

index 40db5b03f17b5952ba4d3d262b0f7e7a34a4584f..0551f1b044637b34188e0ff2f04ec2079f1c31a8 100644 (file)
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 41.6.0
+current_version = 42.0.0
 commit = True
 tag = True
 
index ba7b4647c8ad1b200e79ae2de51ccd79a02eac6b..0a8696c2a86b1f0a5628092ef6821e1d93987e90 100644 (file)
@@ -1,3 +1,23 @@
+v42.0.0
+-------
+
+* #1830, #1909: Mark the easy_install script and setuptools command as deprecated, and use `pip <https://pip.pypa.io/en/stable/>`_ when available to fetch/build wheels for missing ``setup_requires``/``tests_require`` requirements, with the following differences in behavior:
+   * support for ``python_requires``
+   * better support for wheels (proper handling of priority with respect to PEP 425 tags)
+   * PEP 517/518 support
+   * eggs are not supported
+   * no support for the ``allow_hosts`` easy_install option (``index_url``/``find_links`` are still honored)
+   * pip environment variables are honored (and take precedence over easy_install options)
+* #1898: Removed the "upload" and "register" commands in favor of `twine <https://pypi.org/p/twine>`_.
+* #1767: Add support for the ``license_files`` option in ``setup.cfg`` to automatically
+  include multiple license files in a source distribution.
+* #1829: Update handling of wheels compatibility tags:
+  * add support for manylinux2010
+  * fix use of removed 'm' ABI flag in Python 3.8 on Windows
+* #1861: Fix empty namespace package installation from wheel.
+* #1877: Setuptools now exposes a new entry point hook "setuptools.finalize_distribution_options", enabling plugins like `setuptools_scm <https://pypi.org/project/setuptools_scm>`_ to configure options on the distribution at finalization time.
+
+
 v41.6.0
 -------
 
index 344ea5bc36fd57f6a2e67a6fc011bcc0887b91c1..c109e673df5e048898e4d47d91a112a35d84a504 100644 (file)
@@ -282,10 +282,11 @@ unless you need the associated ``setuptools`` feature.
 ``setup_requires``
     A string or list of strings specifying what other distributions need to
     be present in order for the *setup script* to run.  ``setuptools`` will
-    attempt to obtain these before processing the rest of the setup script or
-    commands.  This argument is needed if you are using distutils extensions as
-    part of your build process; for example, extensions that process setup()
-    arguments and turn them into EGG-INFO metadata files.
+    attempt to obtain these (using pip if available) before processing the
+    rest of the setup script or commands.  This argument is needed if you
+    are using distutils extensions as part of your build process; for
+    example, extensions that process setup() arguments and turn them into
+    EGG-INFO metadata files.
 
     (Note: projects listed in ``setup_requires`` will NOT be automatically
     installed on the system where the setup script is being run.  They are
@@ -332,10 +333,10 @@ unless you need the associated ``setuptools`` feature.
     needed to install it, you can use this option to specify them.  It should
     be a string or list of strings specifying what other distributions need to
     be present for the package's tests to run.  When you run the ``test``
-    command, ``setuptools`` will  attempt to obtain these.  Note that these
-    required projects will *not* be installed on the system where the tests
-    are run, but only downloaded to the project's setup directory if they're
-    not already installed locally.
+    command, ``setuptools`` will  attempt to obtain these (using pip if
+    available).  Note that these required projects will *not* be installed on
+    the system where the tests are run, but only downloaded to the project's setup
+    directory if they're not already installed locally.
 
     New in 41.5.0: Deprecated the test command.
 
@@ -1207,7 +1208,7 @@ the quoted part.
 Distributing a ``setuptools``-based project
 ===========================================
 
-Detailed instructions to distribute a setuptools project can be found at 
+Detailed instructions to distribute a setuptools project can be found at
 `Packaging project tutorials`_.
 
 .. _Packaging project tutorials: https://packaging.python.org/tutorials/packaging-projects/#generating-distribution-archives
@@ -1223,7 +1224,7 @@ setup.py is located::
 
 This will generate distribution archives in the `dist` directory.
 
-Before you upload the generated archives make sure you're registered on 
+Before you upload the generated archives make sure you're registered on
 https://test.pypi.org/account/register/. You will also need to verify your email
 to be able to upload any packages.
 You should install twine to be able to upload packages::
@@ -2087,16 +2088,13 @@ New in 41.5.0: Deprecated the test command.
 ``upload`` - Upload source and/or egg distributions to PyPI
 ===========================================================
 
-.. warning::
-    **upload** is deprecated in favor of using `twine
-    <https://pypi.org/p/twine>`_
-
-The ``upload`` command is implemented and `documented
-<https://docs.python.org/3.1/distutils/uploading.html>`_
-in distutils.
+The ``upload`` command was deprecated in version 40.0 and removed in version
+42.0. Use `twine <https://pypi.org/p/twine>`_ instead.
 
-New in 20.1: Added keyring support.
-New in 40.0: Deprecated the upload command.
+For  more information on the current best practices in uploading your packages
+to PyPI, see the Python Packaging User Guide's "Packaging Python Projects"
+tutorial specifically the section on `uploading the distribution archives
+<https://packaging.python.org/tutorials/packaging-projects/#uploading-the-distribution-archives>`_.
 
 
 -----------------------------------------
@@ -2276,6 +2274,7 @@ maintainer_email                maintainer-email   str
 classifiers                     classifier         file:, list-comma
 license                                            str
 license_file                                       str
+license_files                                      list-comma
 description                     summary            file:, str
 long_description                long-description   file:, str
 long_description_content_type                      str                38.6.0
@@ -2420,6 +2419,10 @@ script defines entry points for them!
 Adding ``setup()`` Arguments
 ----------------------------
 
+.. warning:: Adding arguments to setup is discouraged as such arguments
+   are only supported through imperative execution and not supported through
+   declarative config.
+
 Sometimes, your commands may need additional arguments to the ``setup()``
 call.  You can enable this by defining entry points in the
 ``distutils.setup_keywords`` group.  For example, if you wanted a ``setup()``
@@ -2471,6 +2474,25 @@ script using your extension lists your project in its ``setup_requires``
 argument.
 
 
+Customizing Distribution Options
+--------------------------------
+
+Plugins may wish to extend or alter the options on a Distribution object to
+suit the purposes of that project. For example, a tool that infers the
+``Distribution.version`` from SCM-metadata may need to hook into the
+option finalization. To enable this feature, Setuptools offers an entry
+point "setuptools.finalize_distribution_options". That entry point must
+be a callable taking one argument (the Distribution instance).
+
+If the callable has an ``.order`` property, that value will be used to
+determine the order in which the hook is called. Lower numbers are called
+first and the default is zero (0).
+
+Plugins may read, alter, and set properties on the distribution, but each
+plugin is encouraged to load the configuration/settings for their behavior
+independently.
+
+
 Adding new EGG-INFO Files
 -------------------------
 
diff --git a/pkg_resources/tests/data/my-test-package-source/setup.cfg b/pkg_resources/tests/data/my-test-package-source/setup.cfg
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/pkg_resources/tests/data/my-test-package-source/setup.py b/pkg_resources/tests/data/my-test-package-source/setup.py
new file mode 100644 (file)
index 0000000..fe80d28
--- /dev/null
@@ -0,0 +1,6 @@
+import setuptools
+setuptools.setup(
+    name="my-test-package",
+    version="1.0",
+    zip_safe=True,
+)
diff --git a/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/PKG-INFO b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/PKG-INFO
new file mode 100644 (file)
index 0000000..7328e3f
--- /dev/null
@@ -0,0 +1,10 @@
+Metadata-Version: 1.0
+Name: my-test-package
+Version: 1.0
+Summary: UNKNOWN
+Home-page: UNKNOWN
+Author: UNKNOWN
+Author-email: UNKNOWN
+License: UNKNOWN
+Description: UNKNOWN
+Platform: UNKNOWN
diff --git a/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt
new file mode 100644 (file)
index 0000000..3c4ee16
--- /dev/null
@@ -0,0 +1,7 @@
+setup.cfg
+setup.py
+my_test_package.egg-info/PKG-INFO
+my_test_package.egg-info/SOURCES.txt
+my_test_package.egg-info/dependency_links.txt
+my_test_package.egg-info/top_level.txt
+my_test_package.egg-info/zip-safe
\ No newline at end of file
diff --git a/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt
new file mode 100644 (file)
index 0000000..8b13789
--- /dev/null
@@ -0,0 +1 @@
+
diff --git a/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/top_level.txt b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/top_level.txt
new file mode 100644 (file)
index 0000000..8b13789
--- /dev/null
@@ -0,0 +1 @@
+
diff --git a/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe b/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe
new file mode 100644 (file)
index 0000000..8b13789
--- /dev/null
@@ -0,0 +1 @@
+
diff --git a/pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg b/pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg
new file mode 100644 (file)
index 0000000..5115b89
Binary files /dev/null and b/pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg differ
index d735c5902ff811f9fd58fecd9a8d9644cb27c2f9..f9594422f24ec65a699c639a7170cea52a6d26e8 100644 (file)
@@ -1,17 +1,9 @@
-import subprocess
-import sys
-
+import py
 import pytest
 import pkg_resources
 
-SETUP_TEMPLATE = """
-import setuptools
-setuptools.setup(
-    name="my-test-package",
-    version="1.0",
-    zip_safe=True,
-)
-""".lstrip()
+
+TESTS_DATA_DIR = py.path.local(__file__).dirpath('data')
 
 
 class TestFindDistributions:
@@ -21,46 +13,22 @@ class TestFindDistributions:
         target_dir = tmpdir.mkdir('target')
         # place a .egg named directory in the target that is not an egg:
         target_dir.mkdir('not.an.egg')
-        return str(target_dir)
-
-    @pytest.fixture
-    def project_dir(self, tmpdir):
-        project_dir = tmpdir.mkdir('my-test-package')
-        (project_dir / "setup.py").write(SETUP_TEMPLATE)
-        return str(project_dir)
+        return target_dir
 
     def test_non_egg_dir_named_egg(self, target_dir):
-        dists = pkg_resources.find_distributions(target_dir)
+        dists = pkg_resources.find_distributions(str(target_dir))
         assert not list(dists)
 
-    def test_standalone_egg_directory(self, project_dir, target_dir):
-        # install this distro as an unpacked egg:
-        args = [
-            sys.executable,
-            '-c', 'from setuptools.command.easy_install import main; main()',
-            '-mNx',
-            '-d', target_dir,
-            '--always-unzip',
-            project_dir,
-        ]
-        subprocess.check_call(args)
-        dists = pkg_resources.find_distributions(target_dir)
+    def test_standalone_egg_directory(self, target_dir):
+        (TESTS_DATA_DIR / 'my-test-package_unpacked-egg').copy(target_dir)
+        dists = pkg_resources.find_distributions(str(target_dir))
         assert [dist.project_name for dist in dists] == ['my-test-package']
-        dists = pkg_resources.find_distributions(target_dir, only=True)
+        dists = pkg_resources.find_distributions(str(target_dir), only=True)
         assert not list(dists)
 
-    def test_zipped_egg(self, project_dir, target_dir):
-        # install this distro as an unpacked egg:
-        args = [
-            sys.executable,
-            '-c', 'from setuptools.command.easy_install import main; main()',
-            '-mNx',
-            '-d', target_dir,
-            '--zip-ok',
-            project_dir,
-        ]
-        subprocess.check_call(args)
-        dists = pkg_resources.find_distributions(target_dir)
+    def test_zipped_egg(self, target_dir):
+        (TESTS_DATA_DIR / 'my-test-package_zipped-egg').copy(target_dir)
+        dists = pkg_resources.find_distributions(str(target_dir))
         assert [dist.project_name for dist in dists] == ['my-test-package']
-        dists = pkg_resources.find_distributions(target_dir, only=True)
+        dists = pkg_resources.find_distributions(str(target_dir), only=True)
         assert not list(dists)
index 612fb91f631b8098ddd98cab761ff62ec006bf63..0bc1ec01140cec2dd74577314cd22091ee787a8a 100644 (file)
@@ -1,7 +1,10 @@
 [pytest]
 addopts=--doctest-modules --doctest-glob=pkg_resources/api_tests.txt -r sxX
-norecursedirs=dist build *.egg setuptools/extern pkg_resources/extern .*
+norecursedirs=dist build *.egg setuptools/extern pkg_resources/extern pkg_resources/tests/data tools .*
 flake8-ignore =
     setuptools/site-patch.py F821
     setuptools/py*compat.py F811
 doctest_optionflags=ELLIPSIS ALLOW_UNICODE
+filterwarnings =
+       # https://github.com/pypa/setuptools/issues/1823
+       ignore:bdist_wininst command is deprecated
index 42a3d86c6cc1cec3da1cb34e3899ce81cc67f711..c0aa35bae5f4a3de08b031f9e1ae4159f5538b85 100644 (file)
--- a/setup.cfg
+++ b/setup.cfg
@@ -19,7 +19,7 @@ universal = 1
 
 [metadata]
 name = setuptools
-version = 41.6.0
+version = 42.0.0
 description = Easily download, build, install, upgrade, and uninstall Python packages
 author = Python Packaging Authority
 author_email = distutils-sig@python.org
index d97895fcc09827f7a0fed48d42a6bb3fed0b1bcd..277b6640830e7d6e6fc49309bc60a15fae774fa2 100755 (executable)
--- a/setup.py
+++ b/setup.py
@@ -89,6 +89,13 @@ setup_params = dict(
             "%(cmd)s = setuptools.command.%(cmd)s:%(cmd)s" % locals()
             for cmd in read_commands()
         ],
+        "setuptools.finalize_distribution_options": [
+            "parent_finalize = setuptools.dist:_Distribution.finalize_options",
+            "features = setuptools.dist:Distribution._finalize_feature_opts",
+            "keywords = setuptools.dist:Distribution._finalize_setup_keywords",
+            "2to3_doctests = "
+            "setuptools.dist:Distribution._finalize_2to3_doctests",
+        ],
         "distutils.setup_keywords": [
             "eager_resources        = setuptools.dist:assert_string_list",
             "namespace_packages     = setuptools.dist:check_nsp",
index 95d330ef823aa2e12f7846bc63c0955b25df6029..dc95138d049ba3194964d528b552a6d1514fa382 100644 (file)
@@ -4,18 +4,24 @@
 from __future__ import absolute_import, division, print_function
 
 __all__ = [
-    "__title__", "__summary__", "__uri__", "__version__", "__author__",
-    "__email__", "__license__", "__copyright__",
+    "__title__",
+    "__summary__",
+    "__uri__",
+    "__version__",
+    "__author__",
+    "__email__",
+    "__license__",
+    "__copyright__",
 ]
 
 __title__ = "packaging"
 __summary__ = "Core utilities for Python packages"
 __uri__ = "https://github.com/pypa/packaging"
 
-__version__ = "16.8"
+__version__ = "19.2"
 
 __author__ = "Donald Stufft and individual contributors"
 __email__ = "donald@stufft.io"
 
 __license__ = "BSD or Apache License, Version 2.0"
-__copyright__ = "Copyright 2014-2016 %s" % __author__
+__copyright__ = "Copyright 2014-2019 %s" % __author__
index 5ee6220203e5425f900fb5a43676c24ea377c2fa..a0cf67df5245be16a020ca048832e180f7ce8661 100644 (file)
@@ -4,11 +4,23 @@
 from __future__ import absolute_import, division, print_function
 
 from .__about__ import (
-    __author__, __copyright__, __email__, __license__, __summary__, __title__,
-    __uri__, __version__
+    __author__,
+    __copyright__,
+    __email__,
+    __license__,
+    __summary__,
+    __title__,
+    __uri__,
+    __version__,
 )
 
 __all__ = [
-    "__title__", "__summary__", "__uri__", "__version__", "__author__",
-    "__email__", "__license__", "__copyright__",
+    "__title__",
+    "__summary__",
+    "__uri__",
+    "__version__",
+    "__author__",
+    "__email__",
+    "__license__",
+    "__copyright__",
 ]
index 210bb80b7e7b64cb79f7e7cdf3e42819fe3471fe..25da473c196855ad59a6d2d785ef1ddef49795be 100644 (file)
@@ -12,9 +12,9 @@ PY3 = sys.version_info[0] == 3
 # flake8: noqa
 
 if PY3:
-    string_types = str,
+    string_types = (str,)
 else:
-    string_types = basestring,
+    string_types = (basestring,)
 
 
 def with_metaclass(meta, *bases):
@@ -27,4 +27,5 @@ def with_metaclass(meta, *bases):
     class metaclass(meta):
         def __new__(cls, name, this_bases, d):
             return meta(name, bases, d)
-    return type.__new__(metaclass, 'temporary_class', (), {})
+
+    return type.__new__(metaclass, "temporary_class", (), {})
index ccc27861c3a4d9efaa3db753c77c4515a627bd98..68dcca634d8e3f0081bad2f9ae5e653a2942db68 100644 (file)
@@ -5,7 +5,6 @@ from __future__ import absolute_import, division, print_function
 
 
 class Infinity(object):
-
     def __repr__(self):
         return "Infinity"
 
@@ -33,11 +32,11 @@ class Infinity(object):
     def __neg__(self):
         return NegativeInfinity
 
+
 Infinity = Infinity()
 
 
 class NegativeInfinity(object):
-
     def __repr__(self):
         return "-Infinity"
 
@@ -65,4 +64,5 @@ class NegativeInfinity(object):
     def __neg__(self):
         return Infinity
 
+
 NegativeInfinity = NegativeInfinity()
index 031332a3058f4bc47eadd320e63a2cdc6b76df25..4bdfdb24f2096eac046bb9a576065bb96cfd476e 100644 (file)
@@ -17,8 +17,11 @@ from .specifiers import Specifier, InvalidSpecifier
 
 
 __all__ = [
-    "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
-    "Marker", "default_environment",
+    "InvalidMarker",
+    "UndefinedComparison",
+    "UndefinedEnvironmentName",
+    "Marker",
+    "default_environment",
 ]
 
 
@@ -42,7 +45,6 @@ class UndefinedEnvironmentName(ValueError):
 
 
 class Node(object):
-
     def __init__(self, value):
         self.value = value
 
@@ -57,62 +59,52 @@ class Node(object):
 
 
 class Variable(Node):
-
     def serialize(self):
         return str(self)
 
 
 class Value(Node):
-
     def serialize(self):
         return '"{0}"'.format(self)
 
 
 class Op(Node):
-
     def serialize(self):
         return str(self)
 
 
 VARIABLE = (
-    L("implementation_version") |
-    L("platform_python_implementation") |
-    L("implementation_name") |
-    L("python_full_version") |
-    L("platform_release") |
-    L("platform_version") |
-    L("platform_machine") |
-    L("platform_system") |
-    L("python_version") |
-    L("sys_platform") |
-    L("os_name") |
-    L("os.name") |  # PEP-345
-    L("sys.platform") |  # PEP-345
-    L("platform.version") |  # PEP-345
-    L("platform.machine") |  # PEP-345
-    L("platform.python_implementation") |  # PEP-345
-    L("python_implementation") |  # undocumented setuptools legacy
-    L("extra")
+    L("implementation_version")
+    | L("platform_python_implementation")
+    | L("implementation_name")
+    | L("python_full_version")
+    | L("platform_release")
+    | L("platform_version")
+    | L("platform_machine")
+    | L("platform_system")
+    | L("python_version")
+    | L("sys_platform")
+    | L("os_name")
+    | L("os.name")
+    | L("sys.platform")  # PEP-345
+    | L("platform.version")  # PEP-345
+    | L("platform.machine")  # PEP-345
+    | L("platform.python_implementation")  # PEP-345
+    | L("python_implementation")  # PEP-345
+    | L("extra")  # undocumented setuptools legacy
 )
 ALIASES = {
-    'os.name': 'os_name',
-    'sys.platform': 'sys_platform',
-    'platform.version': 'platform_version',
-    'platform.machine': 'platform_machine',
-    'platform.python_implementation': 'platform_python_implementation',
-    'python_implementation': 'platform_python_implementation'
+    "os.name": "os_name",
+    "sys.platform": "sys_platform",
+    "platform.version": "platform_version",
+    "platform.machine": "platform_machine",
+    "platform.python_implementation": "platform_python_implementation",
+    "python_implementation": "platform_python_implementation",
 }
 VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
 
 VERSION_CMP = (
-    L("===") |
-    L("==") |
-    L(">=") |
-    L("<=") |
-    L("!=") |
-    L("~=") |
-    L(">") |
-    L("<")
+    L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
 )
 
 MARKER_OP = VERSION_CMP | L("not in") | L("in")
@@ -152,8 +144,11 @@ def _format_marker(marker, first=True):
     # where the single item is itself it's own list. In that case we want skip
     # the rest of this function so that we don't get extraneous () on the
     # outside.
-    if (isinstance(marker, list) and len(marker) == 1 and
-            isinstance(marker[0], (list, tuple))):
+    if (
+        isinstance(marker, list)
+        and len(marker) == 1
+        and isinstance(marker[0], (list, tuple))
+    ):
         return _format_marker(marker[0])
 
     if isinstance(marker, list):
@@ -239,20 +234,20 @@ def _evaluate_markers(markers, environment):
 
 
 def format_full_version(info):
-    version = '{0.major}.{0.minor}.{0.micro}'.format(info)
+    version = "{0.major}.{0.minor}.{0.micro}".format(info)
     kind = info.releaselevel
-    if kind != 'final':
+    if kind != "final":
         version += kind[0] + str(info.serial)
     return version
 
 
 def default_environment():
-    if hasattr(sys, 'implementation'):
+    if hasattr(sys, "implementation"):
         iver = format_full_version(sys.implementation.version)
         implementation_name = sys.implementation.name
     else:
-        iver = '0'
-        implementation_name = ''
+        iver = "0"
+        implementation_name = ""
 
     return {
         "implementation_name": implementation_name,
@@ -264,19 +259,19 @@ def default_environment():
         "platform_version": platform.version(),
         "python_full_version": platform.python_version(),
         "platform_python_implementation": platform.python_implementation(),
-        "python_version": platform.python_version()[:3],
+        "python_version": ".".join(platform.python_version_tuple()[:2]),
         "sys_platform": sys.platform,
     }
 
 
 class Marker(object):
-
     def __init__(self, marker):
         try:
             self._markers = _coerce_parse_result(MARKER.parseString(marker))
         except ParseException as e:
             err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
-                marker, marker[e.loc:e.loc + 8])
+                marker, marker[e.loc : e.loc + 8]
+            )
             raise InvalidMarker(err_str)
 
     def __str__(self):
index 5b493416f215081b46c901d76c3a4b4648a32296..8a0c2cb9be06e633b26c7205d6efe42827835910 100644 (file)
@@ -38,8 +38,8 @@ IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
 NAME = IDENTIFIER("name")
 EXTRA = IDENTIFIER
 
-URI = Regex(r'[^ ]+')("url")
-URL = (AT + URI)
+URI = Regex(r"[^ ]+")("url")
+URL = AT + URI
 
 EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
 EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
@@ -48,28 +48,31 @@ VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
 VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
 
 VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
-VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
-                       joinString=",", adjacent=False)("_raw_spec")
+VERSION_MANY = Combine(
+    VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
+)("_raw_spec")
 _VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
-_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
+_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
 
 VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
 VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
 
 MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
 MARKER_EXPR.setParseAction(
-    lambda s, l, t: Marker(s[t._original_start:t._original_end])
+    lambda s, l, t: Marker(s[t._original_start : t._original_end])
 )
-MARKER_SEPERATOR = SEMICOLON
-MARKER = MARKER_SEPERATOR + MARKER_EXPR
+MARKER_SEPARATOR = SEMICOLON
+MARKER = MARKER_SEPARATOR + MARKER_EXPR
 
 VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
 URL_AND_MARKER = URL + Optional(MARKER)
 
-NAMED_REQUIREMENT = \
-    NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
+NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
 
 REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
+# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see
+# issue #104
+REQUIREMENT.parseString("x[]")
 
 
 class Requirement(object):
@@ -90,15 +93,21 @@ class Requirement(object):
             req = REQUIREMENT.parseString(requirement_string)
         except ParseException as e:
             raise InvalidRequirement(
-                "Invalid requirement, parse error at \"{0!r}\"".format(
-                    requirement_string[e.loc:e.loc + 8]))
+                'Parse error at "{0!r}": {1}'.format(
+                    requirement_string[e.loc : e.loc + 8], e.msg
+                )
+            )
 
         self.name = req.name
         if req.url:
             parsed_url = urlparse.urlparse(req.url)
-            if not (parsed_url.scheme and parsed_url.netloc) or (
-                    not parsed_url.scheme and not parsed_url.netloc):
-                raise InvalidRequirement("Invalid URL given")
+            if parsed_url.scheme == "file":
+                if urlparse.urlunparse(parsed_url) != req.url:
+                    raise InvalidRequirement("Invalid URL given")
+            elif not (parsed_url.scheme and parsed_url.netloc) or (
+                not parsed_url.scheme and not parsed_url.netloc
+            ):
+                raise InvalidRequirement("Invalid URL: {0}".format(req.url))
             self.url = req.url
         else:
             self.url = None
@@ -117,6 +126,8 @@ class Requirement(object):
 
         if self.url:
             parts.append("@ {0}".format(self.url))
+            if self.marker:
+                parts.append(" ")
 
         if self.marker:
             parts.append("; {0}".format(self.marker))
index 7f5a76cfd63f47dcce29b3ea82f59d10f4e8d771..743576a080a0af8d0995f307ea6afc645b13ca61 100644 (file)
@@ -19,7 +19,6 @@ class InvalidSpecifier(ValueError):
 
 
 class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
-
     @abc.abstractmethod
     def __str__(self):
         """
@@ -84,10 +83,7 @@ class _IndividualSpecifier(BaseSpecifier):
         if not match:
             raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
 
-        self._spec = (
-            match.group("operator").strip(),
-            match.group("version").strip(),
-        )
+        self._spec = (match.group("operator").strip(), match.group("version").strip())
 
         # Store whether or not this Specifier should accept prereleases
         self._prereleases = prereleases
@@ -99,11 +95,7 @@ class _IndividualSpecifier(BaseSpecifier):
             else ""
         )
 
-        return "<{0}({1!r}{2})>".format(
-            self.__class__.__name__,
-            str(self),
-            pre,
-        )
+        return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre)
 
     def __str__(self):
         return "{0}{1}".format(*self._spec)
@@ -194,11 +186,12 @@ class _IndividualSpecifier(BaseSpecifier):
                 # If our version is a prerelease, and we were not set to allow
                 # prereleases, then we'll store it for later incase nothing
                 # else matches this specifier.
-                if (parsed_version.is_prerelease and not
-                        (prereleases or self.prereleases)):
+                if parsed_version.is_prerelease and not (
+                    prereleases or self.prereleases
+                ):
                     found_prereleases.append(version)
                 # Either this is not a prerelease, or we should have been
-                # accepting prereleases from the begining.
+                # accepting prereleases from the beginning.
                 else:
                     yielded = True
                     yield version
@@ -213,8 +206,7 @@ class _IndividualSpecifier(BaseSpecifier):
 
 class LegacySpecifier(_IndividualSpecifier):
 
-    _regex_str = (
-        r"""
+    _regex_str = r"""
         (?P<operator>(==|!=|<=|>=|<|>))
         \s*
         (?P<version>
@@ -225,10 +217,8 @@ class LegacySpecifier(_IndividualSpecifier):
                       # them, and a comma since it's a version separator.
         )
         """
-    )
 
-    _regex = re.compile(
-        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
 
     _operators = {
         "==": "equal",
@@ -269,13 +259,13 @@ def _require_version_compare(fn):
         if not isinstance(prospective, Version):
             return False
         return fn(self, prospective, spec)
+
     return wrapped
 
 
 class Specifier(_IndividualSpecifier):
 
-    _regex_str = (
-        r"""
+    _regex_str = r"""
         (?P<operator>(~=|==|!=|<=|>=|<|>|===))
         (?P<version>
             (?:
@@ -367,10 +357,8 @@ class Specifier(_IndividualSpecifier):
             )
         )
         """
-    )
 
-    _regex = re.compile(
-        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
 
     _operators = {
         "~=": "compatible",
@@ -397,8 +385,7 @@ class Specifier(_IndividualSpecifier):
         prefix = ".".join(
             list(
                 itertools.takewhile(
-                    lambda x: (not x.startswith("post") and not
-                               x.startswith("dev")),
+                    lambda x: (not x.startswith("post") and not x.startswith("dev")),
                     _version_split(spec),
                 )
             )[:-1]
@@ -407,8 +394,9 @@ class Specifier(_IndividualSpecifier):
         # Add the prefix notation to the end of our string
         prefix += ".*"
 
-        return (self._get_operator(">=")(prospective, spec) and
-                self._get_operator("==")(prospective, prefix))
+        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+            prospective, prefix
+        )
 
     @_require_version_compare
     def _compare_equal(self, prospective, spec):
@@ -428,7 +416,7 @@ class Specifier(_IndividualSpecifier):
             # Shorten the prospective version to be the same length as the spec
             # so that we can determine if the specifier is a prefix of the
             # prospective version or not.
-            prospective = prospective[:len(spec)]
+            prospective = prospective[: len(spec)]
 
             # Pad out our two sides with zeros so that they both equal the same
             # length.
@@ -503,7 +491,7 @@ class Specifier(_IndividualSpecifier):
                 return False
 
         # Ensure that we do not allow a local version of the version mentioned
-        # in the specifier, which is techincally greater than, to match.
+        # in the specifier, which is technically greater than, to match.
         if prospective.local is not None:
             if Version(prospective.base_version) == Version(spec.base_version):
                 return False
@@ -567,27 +555,17 @@ def _pad_version(left, right):
     right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
 
     # Get the rest of our versions
-    left_split.append(left[len(left_split[0]):])
-    right_split.append(right[len(right_split[0]):])
+    left_split.append(left[len(left_split[0]) :])
+    right_split.append(right[len(right_split[0]) :])
 
     # Insert our padding
-    left_split.insert(
-        1,
-        ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
-    )
-    right_split.insert(
-        1,
-        ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
-    )
+    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
 
-    return (
-        list(itertools.chain(*left_split)),
-        list(itertools.chain(*right_split)),
-    )
+    return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
 
 
 class SpecifierSet(BaseSpecifier):
-
     def __init__(self, specifiers="", prereleases=None):
         # Split on , to break each indidivual specifier into it's own item, and
         # strip each item to remove leading/trailing whitespace.
@@ -721,10 +699,7 @@ class SpecifierSet(BaseSpecifier):
         # given version is contained within all of them.
         # Note: This use of all() here means that an empty set of specifiers
         #       will always return True, this is an explicit design decision.
-        return all(
-            s.contains(item, prereleases=prereleases)
-            for s in self._specs
-        )
+        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
 
     def filter(self, iterable, prereleases=None):
         # Determine if we're forcing a prerelease or not, if we're not forcing
diff --git a/setuptools/_vendor/packaging/tags.py b/setuptools/_vendor/packaging/tags.py
new file mode 100644 (file)
index 0000000..ec9942f
--- /dev/null
@@ -0,0 +1,404 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import
+
+import distutils.util
+
+try:
+    from importlib.machinery import EXTENSION_SUFFIXES
+except ImportError:  # pragma: no cover
+    import imp
+
+    EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()]
+    del imp
+import platform
+import re
+import sys
+import sysconfig
+import warnings
+
+
+INTERPRETER_SHORT_NAMES = {
+    "python": "py",  # Generic.
+    "cpython": "cp",
+    "pypy": "pp",
+    "ironpython": "ip",
+    "jython": "jy",
+}
+
+
+_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
+
+
+class Tag(object):
+
+    __slots__ = ["_interpreter", "_abi", "_platform"]
+
+    def __init__(self, interpreter, abi, platform):
+        self._interpreter = interpreter.lower()
+        self._abi = abi.lower()
+        self._platform = platform.lower()
+
+    @property
+    def interpreter(self):
+        return self._interpreter
+
+    @property
+    def abi(self):
+        return self._abi
+
+    @property
+    def platform(self):
+        return self._platform
+
+    def __eq__(self, other):
+        return (
+            (self.platform == other.platform)
+            and (self.abi == other.abi)
+            and (self.interpreter == other.interpreter)
+        )
+
+    def __hash__(self):
+        return hash((self._interpreter, self._abi, self._platform))
+
+    def __str__(self):
+        return "{}-{}-{}".format(self._interpreter, self._abi, self._platform)
+
+    def __repr__(self):
+        return "<{self} @ {self_id}>".format(self=self, self_id=id(self))
+
+
+def parse_tag(tag):
+    tags = set()
+    interpreters, abis, platforms = tag.split("-")
+    for interpreter in interpreters.split("."):
+        for abi in abis.split("."):
+            for platform_ in platforms.split("."):
+                tags.add(Tag(interpreter, abi, platform_))
+    return frozenset(tags)
+
+
+def _normalize_string(string):
+    return string.replace(".", "_").replace("-", "_")
+
+
+def _cpython_interpreter(py_version):
+    # TODO: Is using py_version_nodot for interpreter version critical?
+    return "cp{major}{minor}".format(major=py_version[0], minor=py_version[1])
+
+
+def _cpython_abis(py_version):
+    abis = []
+    version = "{}{}".format(*py_version[:2])
+    debug = pymalloc = ucs4 = ""
+    with_debug = sysconfig.get_config_var("Py_DEBUG")
+    has_refcount = hasattr(sys, "gettotalrefcount")
+    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
+    # extension modules is the best option.
+    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+    if with_debug or (with_debug is None and (has_refcount or has_ext)):
+        debug = "d"
+    if py_version < (3, 8):
+        with_pymalloc = sysconfig.get_config_var("WITH_PYMALLOC")
+        if with_pymalloc or with_pymalloc is None:
+            pymalloc = "m"
+        if py_version < (3, 3):
+            unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE")
+            if unicode_size == 4 or (
+                unicode_size is None and sys.maxunicode == 0x10FFFF
+            ):
+                ucs4 = "u"
+    elif debug:
+        # Debug builds can also load "normal" extension modules.
+        # We can also assume no UCS-4 or pymalloc requirement.
+        abis.append("cp{version}".format(version=version))
+    abis.insert(
+        0,
+        "cp{version}{debug}{pymalloc}{ucs4}".format(
+            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
+        ),
+    )
+    return abis
+
+
+def _cpython_tags(py_version, interpreter, abis, platforms):
+    for abi in abis:
+        for platform_ in platforms:
+            yield Tag(interpreter, abi, platform_)
+    for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms):
+        yield tag
+    for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms):
+        yield tag
+    # PEP 384 was first implemented in Python 3.2.
+    for minor_version in range(py_version[1] - 1, 1, -1):
+        for platform_ in platforms:
+            interpreter = "cp{major}{minor}".format(
+                major=py_version[0], minor=minor_version
+            )
+            yield Tag(interpreter, "abi3", platform_)
+
+
+def _pypy_interpreter():
+    return "pp{py_major}{pypy_major}{pypy_minor}".format(
+        py_major=sys.version_info[0],
+        pypy_major=sys.pypy_version_info.major,
+        pypy_minor=sys.pypy_version_info.minor,
+    )
+
+
+def _generic_abi():
+    abi = sysconfig.get_config_var("SOABI")
+    if abi:
+        return _normalize_string(abi)
+    else:
+        return "none"
+
+
+def _pypy_tags(py_version, interpreter, abi, platforms):
+    for tag in (Tag(interpreter, abi, platform) for platform in platforms):
+        yield tag
+    for tag in (Tag(interpreter, "none", platform) for platform in platforms):
+        yield tag
+
+
+def _generic_tags(interpreter, py_version, abi, platforms):
+    for tag in (Tag(interpreter, abi, platform) for platform in platforms):
+        yield tag
+    if abi != "none":
+        tags = (Tag(interpreter, "none", platform_) for platform_ in platforms)
+        for tag in tags:
+            yield tag
+
+
+def _py_interpreter_range(py_version):
+    """
+    Yield Python versions in descending order.
+
+    After the latest version, the major-only version will be yielded, and then
+    each earlier minor version of that major series, in descending order.
+    """
+    yield "py{major}{minor}".format(major=py_version[0], minor=py_version[1])
+    yield "py{major}".format(major=py_version[0])
+    for minor in range(py_version[1] - 1, -1, -1):
+        yield "py{major}{minor}".format(major=py_version[0], minor=minor)
+
+
+def _independent_tags(interpreter, py_version, platforms):
+    """
+    Return the sequence of tags that are consistent across implementations.
+
+    The tags consist of:
+    - py*-none-<platform>
+    - <interpreter>-none-any
+    - py*-none-any
+    """
+    for version in _py_interpreter_range(py_version):
+        for platform_ in platforms:
+            yield Tag(version, "none", platform_)
+    yield Tag(interpreter, "none", "any")
+    for version in _py_interpreter_range(py_version):
+        yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
+    if not is_32bit:
+        return arch
+
+    if arch.startswith("ppc"):
+        return "ppc"
+
+    return "i386"
+
+
+def _mac_binary_formats(version, cpu_arch):
+    formats = [cpu_arch]
+    if cpu_arch == "x86_64":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat64", "fat32"])
+
+    elif cpu_arch == "i386":
+        if version < (10, 4):
+            return []
+        formats.extend(["intel", "fat32", "fat"])
+
+    elif cpu_arch == "ppc64":
+        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+        if version > (10, 5) or version < (10, 4):
+            return []
+        formats.append("fat64")
+
+    elif cpu_arch == "ppc":
+        if version > (10, 6):
+            return []
+        formats.extend(["fat32", "fat"])
+
+    formats.append("universal")
+    return formats
+
+
+def _mac_platforms(version=None, arch=None):
+    version_str, _, cpu_arch = platform.mac_ver()
+    if version is None:
+        version = tuple(map(int, version_str.split(".")[:2]))
+    if arch is None:
+        arch = _mac_arch(cpu_arch)
+    platforms = []
+    for minor_version in range(version[1], -1, -1):
+        compat_version = version[0], minor_version
+        binary_formats = _mac_binary_formats(compat_version, arch)
+        for binary_format in binary_formats:
+            platforms.append(
+                "macosx_{major}_{minor}_{binary_format}".format(
+                    major=compat_version[0],
+                    minor=compat_version[1],
+                    binary_format=binary_format,
+                )
+            )
+    return platforms
+
+
+# From PEP 513.
+def _is_manylinux_compatible(name, glibc_version):
+    # Check for presence of _manylinux module.
+    try:
+        import _manylinux
+
+        return bool(getattr(_manylinux, name + "_compatible"))
+    except (ImportError, AttributeError):
+        # Fall through to heuristic check below.
+        pass
+
+    return _have_compatible_glibc(*glibc_version)
+
+
+def _glibc_version_string():
+    # Returns glibc version string, or None if not using glibc.
+    import ctypes
+
+    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+    # manpage says, "If filename is NULL, then the returned handle is for the
+    # main program". This way we can let the linker do the work to figure out
+    # which libc our process is actually using.
+    process_namespace = ctypes.CDLL(None)
+    try:
+        gnu_get_libc_version = process_namespace.gnu_get_libc_version
+    except AttributeError:
+        # Symbol doesn't exist -> therefore, we are not linked to
+        # glibc.
+        return None
+
+    # Call gnu_get_libc_version, which returns a string like "2.5"
+    gnu_get_libc_version.restype = ctypes.c_char_p
+    version_str = gnu_get_libc_version()
+    # py2 / py3 compatibility:
+    if not isinstance(version_str, str):
+        version_str = version_str.decode("ascii")
+
+    return version_str
+
+
+# Separated out from have_compatible_glibc for easier unit testing.
+def _check_glibc_version(version_str, required_major, minimum_minor):
+    # Parse string and check against requested version.
+    #
+    # We use a regexp instead of str.split because we want to discard any
+    # random junk that might come after the minor version -- this might happen
+    # in patched/forked versions of glibc (e.g. Linaro's version of glibc
+    # uses version strings like "2.20-2014.11"). See gh-3588.
+    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+    if not m:
+        warnings.warn(
+            "Expected glibc version with 2 components major.minor,"
+            " got: %s" % version_str,
+            RuntimeWarning,
+        )
+        return False
+    return (
+        int(m.group("major")) == required_major
+        and int(m.group("minor")) >= minimum_minor
+    )
+
+
+def _have_compatible_glibc(required_major, minimum_minor):
+    version_str = _glibc_version_string()
+    if version_str is None:
+        return False
+    return _check_glibc_version(version_str, required_major, minimum_minor)
+
+
+def _linux_platforms(is_32bit=_32_BIT_INTERPRETER):
+    linux = _normalize_string(distutils.util.get_platform())
+    if linux == "linux_x86_64" and is_32bit:
+        linux = "linux_i686"
+    manylinux_support = (
+        ("manylinux2014", (2, 17)),  # CentOS 7 w/ glibc 2.17 (PEP 599)
+        ("manylinux2010", (2, 12)),  # CentOS 6 w/ glibc 2.12 (PEP 571)
+        ("manylinux1", (2, 5)),  # CentOS 5 w/ glibc 2.5 (PEP 513)
+    )
+    manylinux_support_iter = iter(manylinux_support)
+    for name, glibc_version in manylinux_support_iter:
+        if _is_manylinux_compatible(name, glibc_version):
+            platforms = [linux.replace("linux", name)]
+            break
+    else:
+        platforms = []
+    # Support for a later manylinux implies support for an earlier version.
+    platforms += [linux.replace("linux", name) for name, _ in manylinux_support_iter]
+    platforms.append(linux)
+    return platforms
+
+
+def _generic_platforms():
+    platform = _normalize_string(distutils.util.get_platform())
+    return [platform]
+
+
+def _interpreter_name():
+    name = platform.python_implementation().lower()
+    return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def _generic_interpreter(name, py_version):
+    version = sysconfig.get_config_var("py_version_nodot")
+    if not version:
+        version = "".join(map(str, py_version[:2]))
+    return "{name}{version}".format(name=name, version=version)
+
+
+def sys_tags():
+    """
+    Returns the sequence of tag triples for the running interpreter.
+
+    The order of the sequence corresponds to priority order for the
+    interpreter, from most to least important.
+    """
+    py_version = sys.version_info[:2]
+    interpreter_name = _interpreter_name()
+    if platform.system() == "Darwin":
+        platforms = _mac_platforms()
+    elif platform.system() == "Linux":
+        platforms = _linux_platforms()
+    else:
+        platforms = _generic_platforms()
+
+    if interpreter_name == "cp":
+        interpreter = _cpython_interpreter(py_version)
+        abis = _cpython_abis(py_version)
+        for tag in _cpython_tags(py_version, interpreter, abis, platforms):
+            yield tag
+    elif interpreter_name == "pp":
+        interpreter = _pypy_interpreter()
+        abi = _generic_abi()
+        for tag in _pypy_tags(py_version, interpreter, abi, platforms):
+            yield tag
+    else:
+        interpreter = _generic_interpreter(interpreter_name, py_version)
+        abi = _generic_abi()
+        for tag in _generic_tags(interpreter, py_version, abi, platforms):
+            yield tag
+    for tag in _independent_tags(interpreter, py_version, platforms):
+        yield tag
index 942387cef5d75f299a769b1eb43b6c7679e7a3a0..88418786933b8bc5f6179b8e191f60f79efd7074 100644 (file)
@@ -5,6 +5,8 @@ from __future__ import absolute_import, division, print_function
 
 import re
 
+from .version import InvalidVersion, Version
+
 
 _canonicalize_regex = re.compile(r"[-_.]+")
 
@@ -12,3 +14,44 @@ _canonicalize_regex = re.compile(r"[-_.]+")
 def canonicalize_name(name):
     # This is taken from PEP 503.
     return _canonicalize_regex.sub("-", name).lower()
+
+
+def canonicalize_version(version):
+    """
+    This is very similar to Version.__str__, but has one subtle differences
+    with the way it handles the release segment.
+    """
+
+    try:
+        version = Version(version)
+    except InvalidVersion:
+        # Legacy versions cannot be normalized
+        return version
+
+    parts = []
+
+    # Epoch
+    if version.epoch != 0:
+        parts.append("{0}!".format(version.epoch))
+
+    # Release segment
+    # NB: This strips trailing '.0's to normalize
+    parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release)))
+
+    # Pre-release
+    if version.pre is not None:
+        parts.append("".join(str(x) for x in version.pre))
+
+    # Post-release
+    if version.post is not None:
+        parts.append(".post{0}".format(version.post))
+
+    # Development release
+    if version.dev is not None:
+        parts.append(".dev{0}".format(version.dev))
+
+    # Local version segment
+    if version.local is not None:
+        parts.append("+{0}".format(version.local))
+
+    return "".join(parts)
index 83b5ee8c5efadf22ce2f16ff08c8a8d75f1eb5df..95157a1f78c26829ffbe1bd2463f7735b636d16f 100644 (file)
@@ -10,14 +10,11 @@ import re
 from ._structures import Infinity
 
 
-__all__ = [
-    "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
-]
+__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
 
 
 _Version = collections.namedtuple(
-    "_Version",
-    ["epoch", "release", "dev", "pre", "post", "local"],
+    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
 )
 
 
@@ -40,7 +37,6 @@ class InvalidVersion(ValueError):
 
 
 class _BaseVersion(object):
-
     def __hash__(self):
         return hash(self._key)
 
@@ -70,7 +66,6 @@ class _BaseVersion(object):
 
 
 class LegacyVersion(_BaseVersion):
-
     def __init__(self, version):
         self._version = str(version)
         self._key = _legacy_cmpkey(self._version)
@@ -89,6 +84,26 @@ class LegacyVersion(_BaseVersion):
     def base_version(self):
         return self._version
 
+    @property
+    def epoch(self):
+        return -1
+
+    @property
+    def release(self):
+        return None
+
+    @property
+    def pre(self):
+        return None
+
+    @property
+    def post(self):
+        return None
+
+    @property
+    def dev(self):
+        return None
+
     @property
     def local(self):
         return None
@@ -101,13 +116,19 @@ class LegacyVersion(_BaseVersion):
     def is_postrelease(self):
         return False
 
+    @property
+    def is_devrelease(self):
+        return False
 
-_legacy_version_component_re = re.compile(
-    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
-)
+
+_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
 
 _legacy_version_replacement_map = {
-    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
+    "pre": "c",
+    "preview": "c",
+    "-": "final-",
+    "rc": "c",
+    "dev": "@",
 }
 
 
@@ -154,6 +175,7 @@ def _legacy_cmpkey(version):
 
     return epoch, parts
 
+
 # Deliberately not anchored to the start and end of the string, to make it
 # easier for 3rd party code to reuse
 VERSION_PATTERN = r"""
@@ -190,10 +212,7 @@ VERSION_PATTERN = r"""
 
 class Version(_BaseVersion):
 
-    _regex = re.compile(
-        r"^\s*" + VERSION_PATTERN + r"\s*$",
-        re.VERBOSE | re.IGNORECASE,
-    )
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
 
     def __init__(self, version):
         # Validate the version and parse it into pieces
@@ -205,18 +224,11 @@ class Version(_BaseVersion):
         self._version = _Version(
             epoch=int(match.group("epoch")) if match.group("epoch") else 0,
             release=tuple(int(i) for i in match.group("release").split(".")),
-            pre=_parse_letter_version(
-                match.group("pre_l"),
-                match.group("pre_n"),
-            ),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
             post=_parse_letter_version(
-                match.group("post_l"),
-                match.group("post_n1") or match.group("post_n2"),
-            ),
-            dev=_parse_letter_version(
-                match.group("dev_l"),
-                match.group("dev_n"),
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
             ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
             local=_parse_local_version(match.group("local")),
         )
 
@@ -237,32 +249,57 @@ class Version(_BaseVersion):
         parts = []
 
         # Epoch
-        if self._version.epoch != 0:
-            parts.append("{0}!".format(self._version.epoch))
+        if self.epoch != 0:
+            parts.append("{0}!".format(self.epoch))
 
         # Release segment
-        parts.append(".".join(str(x) for x in self._version.release))
+        parts.append(".".join(str(x) for x in self.release))
 
         # Pre-release
-        if self._version.pre is not None:
-            parts.append("".join(str(x) for x in self._version.pre))
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
 
         # Post-release
-        if self._version.post is not None:
-            parts.append(".post{0}".format(self._version.post[1]))
+        if self.post is not None:
+            parts.append(".post{0}".format(self.post))
 
         # Development release
-        if self._version.dev is not None:
-            parts.append(".dev{0}".format(self._version.dev[1]))
+        if self.dev is not None:
+            parts.append(".dev{0}".format(self.dev))
 
         # Local version segment
-        if self._version.local is not None:
-            parts.append(
-                "+{0}".format(".".join(str(x) for x in self._version.local))
-            )
+        if self.local is not None:
+            parts.append("+{0}".format(self.local))
 
         return "".join(parts)
 
+    @property
+    def epoch(self):
+        return self._version.epoch
+
+    @property
+    def release(self):
+        return self._version.release
+
+    @property
+    def pre(self):
+        return self._version.pre
+
+    @property
+    def post(self):
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self):
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self):
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
     @property
     def public(self):
         return str(self).split("+", 1)[0]
@@ -272,27 +309,25 @@ class Version(_BaseVersion):
         parts = []
 
         # Epoch
-        if self._version.epoch != 0:
-            parts.append("{0}!".format(self._version.epoch))
+        if self.epoch != 0:
+            parts.append("{0}!".format(self.epoch))
 
         # Release segment
-        parts.append(".".join(str(x) for x in self._version.release))
+        parts.append(".".join(str(x) for x in self.release))
 
         return "".join(parts)
 
-    @property
-    def local(self):
-        version_string = str(self)
-        if "+" in version_string:
-            return version_string.split("+", 1)[1]
-
     @property
     def is_prerelease(self):
-        return bool(self._version.dev or self._version.pre)
+        return self.dev is not None or self.pre is not None
 
     @property
     def is_postrelease(self):
-        return bool(self._version.post)
+        return self.post is not None
+
+    @property
+    def is_devrelease(self):
+        return self.dev is not None
 
 
 def _parse_letter_version(letter, number):
@@ -326,7 +361,7 @@ def _parse_letter_version(letter, number):
         return letter, int(number)
 
 
-_local_version_seperators = re.compile(r"[\._-]")
+_local_version_separators = re.compile(r"[\._-]")
 
 
 def _parse_local_version(local):
@@ -336,7 +371,7 @@ def _parse_local_version(local):
     if local is not None:
         return tuple(
             part.lower() if not part.isdigit() else int(part)
-            for part in _local_version_seperators.split(local)
+            for part in _local_version_separators.split(local)
         )
 
 
@@ -347,12 +382,7 @@ def _cmpkey(epoch, release, pre, post, dev, local):
     # re-reverse it back into the correct order and make it a tuple and use
     # that for our sorting key.
     release = tuple(
-        reversed(list(
-            itertools.dropwhile(
-                lambda x: x == 0,
-                reversed(release),
-            )
-        ))
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
     )
 
     # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
@@ -385,9 +415,6 @@ def _cmpkey(epoch, release, pre, post, dev, local):
         # - Numeric segments sort numerically
         # - Shorter versions sort before longer versions when the prefixes
         #   match exactly
-        local = tuple(
-            (i, "") if isinstance(i, int) else (-Infinity, i)
-            for i in local
-        )
+        local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local)
 
     return epoch, release, pre, post, dev, local
index 5731b4244ba3c54728dbc2bb63950a69c803a429..65183d9a2a9b22bcade6ac0ed28ad2b88405f907 100644 (file)
@@ -1,4 +1,4 @@
-packaging==16.8
+packaging==19.2
 pyparsing==2.2.1
 six==1.10.0
 ordered-set==3.1.1
index fe619e2e676f6e0cf95b2af63cdab33ed386f6b0..743f5588faf3ad79850df7bd196749e7a6c03f93 100644 (file)
@@ -2,8 +2,7 @@ __all__ = [
     'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
     'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
     'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
-    'register', 'bdist_wininst', 'upload_docs', 'upload', 'build_clib',
-    'dist_info',
+    'bdist_wininst', 'upload_docs', 'build_clib', 'dist_info',
 ]
 
 from distutils.command.bdist import bdist
index 545c3c442688c25e0b2d5c671a77b86f8fd44ab8..09066f8c861eaa25c541a206057dd13e5a18c75d 100644 (file)
@@ -410,7 +410,13 @@ class easy_install(Command):
         ]
         self._expand_attrs(dirs)
 
-    def run(self):
+    def run(self, show_deprecation=True):
+        if show_deprecation:
+            self.announce(
+                "WARNING: The easy_install command is deprecated "
+                "and will be removed in a future version."
+                , log.WARN,
+            )
         if self.verbose != self.distribution.verbose:
             log.set_verbosity(self.verbose)
         try:
index 31a5ddb57739f8dc132cef1b9f5d252befb67ca7..72b9a3e424707633c7e31a347170f358cfa3f87a 100644 (file)
@@ -114,7 +114,7 @@ class install(orig.install):
             args.insert(0, setuptools.bootstrap_install_from)
 
         cmd.args = args
-        cmd.run()
+        cmd.run(show_deprecation=False)
         setuptools.bootstrap_install_from = None
 
 
index 98bc01566f42b289e08cc180fb6e00cc5f4824a3..b8266b9a60f8c363ba35f7b73befd7c9c7cb4abc 100644 (file)
@@ -1,18 +1,18 @@
 from distutils import log
 import distutils.command.register as orig
 
+from setuptools.errors import RemovedCommandError
+
 
 class register(orig.register):
-    __doc__ = orig.register.__doc__
+    """Formerly used to register packages on PyPI."""
 
     def run(self):
-        try:
-            # Make sure that we are using valid current name/version info
-            self.run_command('egg_info')
-            orig.register.run(self)
-        finally:
-            self.announce(
-                "WARNING: Registering is deprecated, use twine to "
-                "upload instead (https://pypi.org/p/twine/)",
-                log.WARN
-            )
+        msg = (
+            "The register command has been removed, use twine to upload "
+            + "instead (https://pypi.org/p/twine)"
+        )
+
+        self.announce("ERROR: " + msg, log.ERROR)
+
+        raise RemovedCommandError(msg)
index dc25398147cfedcbc01be33ee25b693361a685cf..55ecdd978a229faec13863882e5c5fd989a12190 100644 (file)
@@ -5,7 +5,7 @@ import sys
 import io
 import contextlib
 
-from setuptools.extern import six
+from setuptools.extern import six, ordered_set
 
 from .py36compat import sdist_add_defaults
 
@@ -200,10 +200,12 @@ class sdist(sdist_add_defaults, orig.sdist):
         manifest.close()
 
     def check_license(self):
-        """Checks if license_file' is configured and adds it to
-        'self.filelist' if the value contains a valid path.
+        """Checks if license_file' or 'license_files' is configured and adds any
+        valid paths to 'self.filelist'.
         """
 
+        files = ordered_set.OrderedSet()
+
         opts = self.distribution.get_option_dict('metadata')
 
         # ignore the source of the value
@@ -211,11 +213,19 @@ class sdist(sdist_add_defaults, orig.sdist):
 
         if license_file is None:
             log.debug("'license_file' option was not specified")
-            return
+        else:
+            files.add(license_file)
 
-        if not os.path.exists(license_file):
-            log.warn("warning: Failed to find the configured license file '%s'",
-                    license_file)
-            return
+        try:
+            files.update(self.distribution.metadata.license_files)
+        except TypeError:
+            log.warn("warning: 'license_files' option is malformed")
+
+        for f in files:
+            if not os.path.exists(f):
+                log.warn(
+                    "warning: Failed to find the configured license file '%s'",
+                    f)
+                files.remove(f)
 
-        self.filelist.append(license_file)
+        self.filelist.extend(files)
index 6db8888bb29cc59ef23e42a4b3335c311a6ef0ad..ec7f81e22772511d668e5ab92f625db33259e803 100644 (file)
-import io
-import os
-import hashlib
-import getpass
-
-from base64 import standard_b64encode
-
 from distutils import log
 from distutils.command import upload as orig
-from distutils.spawn import spawn
-
-from distutils.errors import DistutilsError
 
-from setuptools.extern.six.moves.urllib.request import urlopen, Request
-from setuptools.extern.six.moves.urllib.error import HTTPError
-from setuptools.extern.six.moves.urllib.parse import urlparse
+from setuptools.errors import RemovedCommandError
 
 
 class upload(orig.upload):
-    """
-    Override default upload behavior to obtain password
-    in a variety of different ways.
-    """
-    def run(self):
-        try:
-            orig.upload.run(self)
-        finally:
-            self.announce(
-                "WARNING: Uploading via this command is deprecated, use twine "
-                "to upload instead (https://pypi.org/p/twine/)",
-                log.WARN
-            )
+    """Formerly used to upload packages to PyPI."""
 
-    def finalize_options(self):
-        orig.upload.finalize_options(self)
-        self.username = (
-            self.username or
-            getpass.getuser()
-        )
-        # Attempt to obtain password. Short circuit evaluation at the first
-        # sign of success.
-        self.password = (
-            self.password or
-            self._load_password_from_keyring() or
-            self._prompt_for_password()
+    def run(self):
+        msg = (
+            "The upload command has been removed, use twine to upload "
+            + "instead (https://pypi.org/p/twine)"
         )
 
-    def upload_file(self, command, pyversion, filename):
-        # Makes sure the repository URL is compliant
-        schema, netloc, url, params, query, fragments = \
-            urlparse(self.repository)
-        if params or query or fragments:
-            raise AssertionError("Incompatible url %s" % self.repository)
-
-        if schema not in ('http', 'https'):
-            raise AssertionError("unsupported schema " + schema)
-
-        # Sign if requested
-        if self.sign:
-            gpg_args = ["gpg", "--detach-sign", "-a", filename]
-            if self.identity:
-                gpg_args[2:2] = ["--local-user", self.identity]
-            spawn(gpg_args,
-                  dry_run=self.dry_run)
-
-        # Fill in the data - send all the meta-data in case we need to
-        # register a new release
-        with open(filename, 'rb') as f:
-            content = f.read()
-
-        meta = self.distribution.metadata
-
-        data = {
-            # action
-            ':action': 'file_upload',
-            'protocol_version': '1',
-
-            # identify release
-            'name': meta.get_name(),
-            'version': meta.get_version(),
-
-            # file content
-            'content': (os.path.basename(filename), content),
-            'filetype': command,
-            'pyversion': pyversion,
-            'md5_digest': hashlib.md5(content).hexdigest(),
-
-            # additional meta-data
-            'metadata_version': str(meta.get_metadata_version()),
-            'summary': meta.get_description(),
-            'home_page': meta.get_url(),
-            'author': meta.get_contact(),
-            'author_email': meta.get_contact_email(),
-            'license': meta.get_licence(),
-            'description': meta.get_long_description(),
-            'keywords': meta.get_keywords(),
-            'platform': meta.get_platforms(),
-            'classifiers': meta.get_classifiers(),
-            'download_url': meta.get_download_url(),
-            # PEP 314
-            'provides': meta.get_provides(),
-            'requires': meta.get_requires(),
-            'obsoletes': meta.get_obsoletes(),
-            }
-
-        data['comment'] = ''
-
-        if self.sign:
-            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
-                                     open(filename+".asc", "rb").read())
-
-        # set up the authentication
-        user_pass = (self.username + ":" + self.password).encode('ascii')
-        # The exact encoding of the authentication string is debated.
-        # Anyway PyPI only accepts ascii for both username or password.
-        auth = "Basic " + standard_b64encode(user_pass).decode('ascii')
-
-        # Build up the MIME payload for the POST data
-        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
-        sep_boundary = b'\r\n--' + boundary.encode('ascii')
-        end_boundary = sep_boundary + b'--\r\n'
-        body = io.BytesIO()
-        for key, value in data.items():
-            title = '\r\nContent-Disposition: form-data; name="%s"' % key
-            # handle multiple entries for the same name
-            if not isinstance(value, list):
-                value = [value]
-            for value in value:
-                if type(value) is tuple:
-                    title += '; filename="%s"' % value[0]
-                    value = value[1]
-                else:
-                    value = str(value).encode('utf-8')
-                body.write(sep_boundary)
-                body.write(title.encode('utf-8'))
-                body.write(b"\r\n\r\n")
-                body.write(value)
-        body.write(end_boundary)
-        body = body.getvalue()
-
-        msg = "Submitting %s to %s" % (filename, self.repository)
-        self.announce(msg, log.INFO)
-
-        # build the Request
-        headers = {
-            'Content-type': 'multipart/form-data; boundary=%s' % boundary,
-            'Content-length': str(len(body)),
-            'Authorization': auth,
-        }
-
-        request = Request(self.repository, data=body,
-                          headers=headers)
-        # send the data
-        try:
-            result = urlopen(request)
-            status = result.getcode()
-            reason = result.msg
-        except HTTPError as e:
-            status = e.code
-            reason = e.msg
-        except OSError as e:
-            self.announce(str(e), log.ERROR)
-            raise
-
-        if status == 200:
-            self.announce('Server response (%s): %s' % (status, reason),
-                          log.INFO)
-            if self.show_response:
-                text = getattr(self, '_read_pypi_response',
-                               lambda x: None)(result)
-                if text is not None:
-                    msg = '\n'.join(('-' * 75, text, '-' * 75))
-                    self.announce(msg, log.INFO)
-        else:
-            msg = 'Upload failed (%s): %s' % (status, reason)
-            self.announce(msg, log.ERROR)
-            raise DistutilsError(msg)
-
-    def _load_password_from_keyring(self):
-        """
-        Attempt to load password from keyring. Suppress Exceptions.
-        """
-        try:
-            keyring = __import__('keyring')
-            return keyring.get_password(self.repository, self.username)
-        except Exception:
-            pass
-
-    def _prompt_for_password(self):
-        """
-        Prompt for a password on the tty. Suppress Exceptions.
-        """
-        try:
-            return getpass.getpass()
-        except (Exception, KeyboardInterrupt):
-            pass
+        self.announce("ERROR: " + msg, log.ERROR)
+        raise RemovedCommandError(msg)
index 2d50e25e8990a665c4832660fd80227fcf1af04a..9b9a0c45e756b44ddea7660228934d0a37fcd97c 100644 (file)
@@ -483,6 +483,7 @@ class ConfigMetadataHandler(ConfigHandler):
             'obsoletes': parse_list,
             'classifiers': self._get_parser_compound(parse_file, parse_list),
             'license': exclude_files_parser('license'),
+            'license_files': parse_list,
             'description': parse_file,
             'long_description': parse_file,
             'version': self._parse_version,
index 2e5ad4bd687a823b2d9b0df4636c8ed0f2976c2e..1ba262ec8b6fa43f3a42047260dac076ad0d70d3 100644 (file)
@@ -409,6 +409,7 @@ class Distribution(_Distribution):
         'long_description_content_type': None,
         'project_urls': dict,
         'provides_extras': ordered_set.OrderedSet,
+        'license_files': ordered_set.OrderedSet,
     }
 
     _patched_dist = None
@@ -724,15 +725,28 @@ class Distribution(_Distribution):
         return resolved_dists
 
     def finalize_options(self):
-        _Distribution.finalize_options(self)
-        if self.features:
-            self._set_global_opts_from_features()
+        """
+        Allow plugins to apply arbitrary operations to the
+        distribution. Each hook may optionally define a 'order'
+        to influence the order of execution. Smaller numbers
+        go first and the default is 0.
+        """
+        hook_key = 'setuptools.finalize_distribution_options'
+
+        def by_order(hook):
+            return getattr(hook, 'order', 0)
+        eps = pkg_resources.iter_entry_points(hook_key)
+        for ep in sorted(eps, key=by_order):
+            ep.load()(self)
 
+    def _finalize_setup_keywords(self):
         for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
             value = getattr(self, ep.name, None)
             if value is not None:
                 ep.require(installer=self.fetch_build_egg)
                 ep.load()(self, ep.name, value)
+
+    def _finalize_2to3_doctests(self):
         if getattr(self, 'convert_2to3_doctests', None):
             # XXX may convert to set here when we can rely on set being builtin
             self.convert_2to3_doctests = [
@@ -759,36 +773,15 @@ class Distribution(_Distribution):
 
     def fetch_build_egg(self, req):
         """Fetch an egg needed for building"""
-        from setuptools.command.easy_install import easy_install
-        dist = self.__class__({'script_args': ['easy_install']})
-        opts = dist.get_option_dict('easy_install')
-        opts.clear()
-        opts.update(
-            (k, v)
-            for k, v in self.get_option_dict('easy_install').items()
-            if k in (
-                # don't use any other settings
-                'find_links', 'site_dirs', 'index_url',
-                'optimize', 'site_dirs', 'allow_hosts',
-            ))
-        if self.dependency_links:
-            links = self.dependency_links[:]
-            if 'find_links' in opts:
-                links = opts['find_links'][1] + links
-            opts['find_links'] = ('setup', links)
-        install_dir = self.get_egg_cache_dir()
-        cmd = easy_install(
-            dist, args=["x"], install_dir=install_dir,
-            exclude_scripts=True,
-            always_copy=False, build_directory=None, editable=False,
-            upgrade=False, multi_version=True, no_report=True, user=False
-        )
-        cmd.ensure_finalized()
-        return cmd.easy_install(req)
+        from setuptools.installer import fetch_build_egg
+        return fetch_build_egg(self, req)
 
-    def _set_global_opts_from_features(self):
+    def _finalize_feature_opts(self):
         """Add --with-X/--without-X options based on optional features"""
 
+        if not self.features:
+            return
+
         go = []
         no = self.negative_opt.copy()
 
diff --git a/setuptools/errors.py b/setuptools/errors.py
new file mode 100644 (file)
index 0000000..2701747
--- /dev/null
@@ -0,0 +1,16 @@
+"""setuptools.errors
+
+Provides exceptions used by setuptools modules.
+"""
+
+from distutils.errors import DistutilsError
+
+
+class RemovedCommandError(DistutilsError, RuntimeError):
+    """Error used for commands that have been removed in setuptools.
+
+    Since ``setuptools`` is built on ``distutils``, simply removing a command
+    from ``setuptools`` will make the behavior fall back to ``distutils``; this
+    error is raised if a command exists in ``distutils`` but has been actively
+    removed in ``setuptools``.
+    """
diff --git a/setuptools/glibc.py b/setuptools/glibc.py
deleted file mode 100644 (file)
index a134591..0000000
+++ /dev/null
@@ -1,86 +0,0 @@
-# This file originally from pip:
-# https://github.com/pypa/pip/blob/8f4f15a5a95d7d5b511ceaee9ed261176c181970/src/pip/_internal/utils/glibc.py
-from __future__ import absolute_import
-
-import ctypes
-import re
-import warnings
-
-
-def glibc_version_string():
-    "Returns glibc version string, or None if not using glibc."
-
-    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
-    # manpage says, "If filename is NULL, then the returned handle is for the
-    # main program". This way we can let the linker do the work to figure out
-    # which libc our process is actually using.
-    process_namespace = ctypes.CDLL(None)
-    try:
-        gnu_get_libc_version = process_namespace.gnu_get_libc_version
-    except AttributeError:
-        # Symbol doesn't exist -> therefore, we are not linked to
-        # glibc.
-        return None
-
-    # Call gnu_get_libc_version, which returns a string like "2.5"
-    gnu_get_libc_version.restype = ctypes.c_char_p
-    version_str = gnu_get_libc_version()
-    # py2 / py3 compatibility:
-    if not isinstance(version_str, str):
-        version_str = version_str.decode("ascii")
-
-    return version_str
-
-
-# Separated out from have_compatible_glibc for easier unit testing
-def check_glibc_version(version_str, required_major, minimum_minor):
-    # Parse string and check against requested version.
-    #
-    # We use a regexp instead of str.split because we want to discard any
-    # random junk that might come after the minor version -- this might happen
-    # in patched/forked versions of glibc (e.g. Linaro's version of glibc
-    # uses version strings like "2.20-2014.11"). See gh-3588.
-    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
-    if not m:
-        warnings.warn("Expected glibc version with 2 components major.minor,"
-                      " got: %s" % version_str, RuntimeWarning)
-        return False
-    return (int(m.group("major")) == required_major and
-            int(m.group("minor")) >= minimum_minor)
-
-
-def have_compatible_glibc(required_major, minimum_minor):
-    version_str = glibc_version_string()
-    if version_str is None:
-        return False
-    return check_glibc_version(version_str, required_major, minimum_minor)
-
-
-# platform.libc_ver regularly returns completely nonsensical glibc
-# versions. E.g. on my computer, platform says:
-#
-#   ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
-#   ('glibc', '2.7')
-#   ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
-#   ('glibc', '2.9')
-#
-# But the truth is:
-#
-#   ~$ ldd --version
-#   ldd (Debian GLIBC 2.22-11) 2.22
-#
-# This is unfortunate, because it means that the linehaul data on libc
-# versions that was generated by pip 8.1.2 and earlier is useless and
-# misleading. Solution: instead of using platform, use our code that actually
-# works.
-def libc_ver():
-    """Try to determine the glibc version
-
-    Returns a tuple of strings (lib, version) which default to empty strings
-    in case the lookup fails.
-    """
-    glibc_version = glibc_version_string()
-    if glibc_version is None:
-        return ("", "")
-    else:
-        return ("glibc", glibc_version)
diff --git a/setuptools/installer.py b/setuptools/installer.py
new file mode 100644 (file)
index 0000000..35bc3cc
--- /dev/null
@@ -0,0 +1,129 @@
+import glob
+import os
+import subprocess
+import sys
+from distutils import log
+from distutils.errors import DistutilsError
+
+import pkg_resources
+from setuptools.command.easy_install import easy_install
+from setuptools.wheel import Wheel
+
+from .py31compat import TemporaryDirectory
+
+
+def _legacy_fetch_build_egg(dist, req):
+    """Fetch an egg needed for building.
+
+    Legacy path using EasyInstall.
+    """
+    tmp_dist = dist.__class__({'script_args': ['easy_install']})
+    opts = tmp_dist.get_option_dict('easy_install')
+    opts.clear()
+    opts.update(
+        (k, v)
+        for k, v in dist.get_option_dict('easy_install').items()
+        if k in (
+            # don't use any other settings
+            'find_links', 'site_dirs', 'index_url',
+            'optimize', 'site_dirs', 'allow_hosts',
+        ))
+    if dist.dependency_links:
+        links = dist.dependency_links[:]
+        if 'find_links' in opts:
+            links = opts['find_links'][1] + links
+        opts['find_links'] = ('setup', links)
+    install_dir = dist.get_egg_cache_dir()
+    cmd = easy_install(
+        tmp_dist, args=["x"], install_dir=install_dir,
+        exclude_scripts=True,
+        always_copy=False, build_directory=None, editable=False,
+        upgrade=False, multi_version=True, no_report=True, user=False
+    )
+    cmd.ensure_finalized()
+    return cmd.easy_install(req)
+
+
+def fetch_build_egg(dist, req):
+    """Fetch an egg needed for building.
+
+    Use pip/wheel to fetch/build a wheel."""
+    # Check pip is available.
+    try:
+        pkg_resources.get_distribution('pip')
+    except pkg_resources.DistributionNotFound:
+        dist.announce(
+            'WARNING: The pip package is not available, falling back '
+            'to EasyInstall for handling setup_requires/test_requires; '
+            'this is deprecated and will be removed in a future version.'
+            , log.WARN
+        )
+        return _legacy_fetch_build_egg(dist, req)
+    # Warn if wheel is not.
+    try:
+        pkg_resources.get_distribution('wheel')
+    except pkg_resources.DistributionNotFound:
+        dist.announce('WARNING: The wheel package is not available.', log.WARN)
+    if not isinstance(req, pkg_resources.Requirement):
+        req = pkg_resources.Requirement.parse(req)
+    # Take easy_install options into account, but do not override relevant
+    # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll
+    # take precedence.
+    opts = dist.get_option_dict('easy_install')
+    if 'allow_hosts' in opts:
+        raise DistutilsError('the `allow-hosts` option is not supported '
+                             'when using pip to install requirements.')
+    if 'PIP_QUIET' in os.environ or 'PIP_VERBOSE' in os.environ:
+        quiet = False
+    else:
+        quiet = True
+    if 'PIP_INDEX_URL' in os.environ:
+        index_url = None
+    elif 'index_url' in opts:
+        index_url = opts['index_url'][1]
+    else:
+        index_url = None
+    if 'find_links' in opts:
+        find_links = opts['find_links'][1][:]
+    else:
+        find_links = []
+    if dist.dependency_links:
+        find_links.extend(dist.dependency_links)
+    eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
+    environment = pkg_resources.Environment()
+    for egg_dist in pkg_resources.find_distributions(eggs_dir):
+        if egg_dist in req and environment.can_add(egg_dist):
+            return egg_dist
+    with TemporaryDirectory() as tmpdir:
+        cmd = [
+            sys.executable, '-m', 'pip',
+            '--disable-pip-version-check',
+            'wheel', '--no-deps',
+            '-w', tmpdir,
+        ]
+        if quiet:
+            cmd.append('--quiet')
+        if index_url is not None:
+            cmd.extend(('--index-url', index_url))
+        if find_links is not None:
+            for link in find_links:
+                cmd.extend(('--find-links', link))
+        # If requirement is a PEP 508 direct URL, directly pass
+        # the URL to pip, as `req @ url` does not work on the
+        # command line.
+        if req.url:
+            cmd.append(req.url)
+        else:
+            cmd.append(str(req))
+        try:
+            subprocess.check_call(cmd)
+        except subprocess.CalledProcessError as e:
+            raise DistutilsError(str(e))
+        wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0])
+        dist_location = os.path.join(eggs_dir, wheel.egg_name())
+        wheel.install_as_egg(dist_location)
+        dist_metadata = pkg_resources.PathMetadata(
+            dist_location, os.path.join(dist_location, 'EGG-INFO'))
+        dist = pkg_resources.Distribution.from_filename(
+            dist_location, metadata=dist_metadata)
+        return dist
diff --git a/setuptools/pep425tags.py b/setuptools/pep425tags.py
deleted file mode 100644 (file)
index 48745a2..0000000
+++ /dev/null
@@ -1,319 +0,0 @@
-# This file originally from pip:
-# https://github.com/pypa/pip/blob/8f4f15a5a95d7d5b511ceaee9ed261176c181970/src/pip/_internal/pep425tags.py
-"""Generate and work with PEP 425 Compatibility Tags."""
-from __future__ import absolute_import
-
-import distutils.util
-from distutils import log
-import platform
-import re
-import sys
-import sysconfig
-import warnings
-from collections import OrderedDict
-
-from .extern import six
-
-from . import glibc
-
-_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
-
-
-def get_config_var(var):
-    try:
-        return sysconfig.get_config_var(var)
-    except IOError as e:  # Issue #1074
-        warnings.warn("{}".format(e), RuntimeWarning)
-        return None
-
-
-def get_abbr_impl():
-    """Return abbreviated implementation name."""
-    if hasattr(sys, 'pypy_version_info'):
-        pyimpl = 'pp'
-    elif sys.platform.startswith('java'):
-        pyimpl = 'jy'
-    elif sys.platform == 'cli':
-        pyimpl = 'ip'
-    else:
-        pyimpl = 'cp'
-    return pyimpl
-
-
-def get_impl_ver():
-    """Return implementation version."""
-    impl_ver = get_config_var("py_version_nodot")
-    if not impl_ver or get_abbr_impl() == 'pp':
-        impl_ver = ''.join(map(str, get_impl_version_info()))
-    return impl_ver
-
-
-def get_impl_version_info():
-    """Return sys.version_info-like tuple for use in decrementing the minor
-    version."""
-    if get_abbr_impl() == 'pp':
-        # as per https://github.com/pypa/pip/issues/2882
-        return (sys.version_info[0], sys.pypy_version_info.major,
-                sys.pypy_version_info.minor)
-    else:
-        return sys.version_info[0], sys.version_info[1]
-
-
-def get_impl_tag():
-    """
-    Returns the Tag for this specific implementation.
-    """
-    return "{}{}".format(get_abbr_impl(), get_impl_ver())
-
-
-def get_flag(var, fallback, expected=True, warn=True):
-    """Use a fallback method for determining SOABI flags if the needed config
-    var is unset or unavailable."""
-    val = get_config_var(var)
-    if val is None:
-        if warn:
-            log.debug("Config variable '%s' is unset, Python ABI tag may "
-                      "be incorrect", var)
-        return fallback()
-    return val == expected
-
-
-def get_abi_tag():
-    """Return the ABI tag based on SOABI (if available) or emulate SOABI
-    (CPython 2, PyPy)."""
-    soabi = get_config_var('SOABI')
-    impl = get_abbr_impl()
-    if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
-        d = ''
-        m = ''
-        u = ''
-        if get_flag('Py_DEBUG',
-                    lambda: hasattr(sys, 'gettotalrefcount'),
-                    warn=(impl == 'cp')):
-            d = 'd'
-        if get_flag('WITH_PYMALLOC',
-                    lambda: impl == 'cp',
-                    warn=(impl == 'cp')):
-            m = 'm'
-        if get_flag('Py_UNICODE_SIZE',
-                    lambda: sys.maxunicode == 0x10ffff,
-                    expected=4,
-                    warn=(impl == 'cp' and
-                          six.PY2)) \
-                and six.PY2:
-            u = 'u'
-        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
-    elif soabi and soabi.startswith('cpython-'):
-        abi = 'cp' + soabi.split('-')[1]
-    elif soabi:
-        abi = soabi.replace('.', '_').replace('-', '_')
-    else:
-        abi = None
-    return abi
-
-
-def _is_running_32bit():
-    return sys.maxsize == 2147483647
-
-
-def get_platform():
-    """Return our platform name 'win32', 'linux_x86_64'"""
-    if sys.platform == 'darwin':
-        # distutils.util.get_platform() returns the release based on the value
-        # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
-        # be significantly older than the user's current machine.
-        release, _, machine = platform.mac_ver()
-        split_ver = release.split('.')
-
-        if machine == "x86_64" and _is_running_32bit():
-            machine = "i386"
-        elif machine == "ppc64" and _is_running_32bit():
-            machine = "ppc"
-
-        return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)
-
-    # XXX remove distutils dependency
-    result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
-    if result == "linux_x86_64" and _is_running_32bit():
-        # 32 bit Python program (running on a 64 bit Linux): pip should only
-        # install and run 32 bit compiled extensions in that case.
-        result = "linux_i686"
-
-    return result
-
-
-def is_manylinux1_compatible():
-    # Only Linux, and only x86-64 / i686
-    if get_platform() not in {"linux_x86_64", "linux_i686"}:
-        return False
-
-    # Check for presence of _manylinux module
-    try:
-        import _manylinux
-        return bool(_manylinux.manylinux1_compatible)
-    except (ImportError, AttributeError):
-        # Fall through to heuristic check below
-        pass
-
-    # Check glibc version. CentOS 5 uses glibc 2.5.
-    return glibc.have_compatible_glibc(2, 5)
-
-
-def get_darwin_arches(major, minor, machine):
-    """Return a list of supported arches (including group arches) for
-    the given major, minor and machine architecture of a macOS machine.
-    """
-    arches = []
-
-    def _supports_arch(major, minor, arch):
-        # Looking at the application support for macOS versions in the chart
-        # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
-        # our timeline looks roughly like:
-        #
-        # 10.0 - Introduces ppc support.
-        # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
-        #        and x86_64 support is CLI only, and cannot be used for GUI
-        #        applications.
-        # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
-        # 10.6 - Drops support for ppc64
-        # 10.7 - Drops support for ppc
-        #
-        # Given that we do not know if we're installing a CLI or a GUI
-        # application, we must be conservative and assume it might be a GUI
-        # application and behave as if ppc64 and x86_64 support did not occur
-        # until 10.5.
-        #
-        # Note: The above information is taken from the "Application support"
-        #       column in the chart not the "Processor support" since I believe
-        #       that we care about what instruction sets an application can use
-        #       not which processors the OS supports.
-        if arch == 'ppc':
-            return (major, minor) <= (10, 5)
-        if arch == 'ppc64':
-            return (major, minor) == (10, 5)
-        if arch == 'i386':
-            return (major, minor) >= (10, 4)
-        if arch == 'x86_64':
-            return (major, minor) >= (10, 5)
-        if arch in groups:
-            for garch in groups[arch]:
-                if _supports_arch(major, minor, garch):
-                    return True
-        return False
-
-    groups = OrderedDict([
-        ("fat", ("i386", "ppc")),
-        ("intel", ("x86_64", "i386")),
-        ("fat64", ("x86_64", "ppc64")),
-        ("fat32", ("x86_64", "i386", "ppc")),
-    ])
-
-    if _supports_arch(major, minor, machine):
-        arches.append(machine)
-
-    for garch in groups:
-        if machine in groups[garch] and _supports_arch(major, minor, garch):
-            arches.append(garch)
-
-    arches.append('universal')
-
-    return arches
-
-
-def get_supported(versions=None, noarch=False, platform=None,
-                  impl=None, abi=None):
-    """Return a list of supported tags for each version specified in
-    `versions`.
-
-    :param versions: a list of string versions, of the form ["33", "32"],
-        or None. The first version will be assumed to support our ABI.
-    :param platform: specify the exact platform you want valid
-        tags for, or None. If None, use the local system platform.
-    :param impl: specify the exact implementation you want valid
-        tags for, or None. If None, use the local interpreter impl.
-    :param abi: specify the exact abi you want valid
-        tags for, or None. If None, use the local interpreter abi.
-    """
-    supported = []
-
-    # Versions must be given with respect to the preference
-    if versions is None:
-        versions = []
-        version_info = get_impl_version_info()
-        major = version_info[:-1]
-        # Support all previous minor Python versions.
-        for minor in range(version_info[-1], -1, -1):
-            versions.append(''.join(map(str, major + (minor,))))
-
-    impl = impl or get_abbr_impl()
-
-    abis = []
-
-    abi = abi or get_abi_tag()
-    if abi:
-        abis[0:0] = [abi]
-
-    abi3s = set()
-    import imp
-    for suffix in imp.get_suffixes():
-        if suffix[0].startswith('.abi'):
-            abi3s.add(suffix[0].split('.', 2)[1])
-
-    abis.extend(sorted(list(abi3s)))
-
-    abis.append('none')
-
-    if not noarch:
-        arch = platform or get_platform()
-        if arch.startswith('macosx'):
-            # support macosx-10.6-intel on macosx-10.9-x86_64
-            match = _osx_arch_pat.match(arch)
-            if match:
-                name, major, minor, actual_arch = match.groups()
-                tpl = '{}_{}_%i_%s'.format(name, major)
-                arches = []
-                for m in reversed(range(int(minor) + 1)):
-                    for a in get_darwin_arches(int(major), m, actual_arch):
-                        arches.append(tpl % (m, a))
-            else:
-                # arch pattern didn't match (?!)
-                arches = [arch]
-        elif platform is None and is_manylinux1_compatible():
-            arches = [arch.replace('linux', 'manylinux1'), arch]
-        else:
-            arches = [arch]
-
-        # Current version, current API (built specifically for our Python):
-        for abi in abis:
-            for arch in arches:
-                supported.append(('%s%s' % (impl, versions[0]), abi, arch))
-
-        # abi3 modules compatible with older version of Python
-        for version in versions[1:]:
-            # abi3 was introduced in Python 3.2
-            if version in {'31', '30'}:
-                break
-            for abi in abi3s:   # empty set if not Python 3
-                for arch in arches:
-                    supported.append(("%s%s" % (impl, version), abi, arch))
-
-        # Has binaries, does not use the Python API:
-        for arch in arches:
-            supported.append(('py%s' % (versions[0][0]), 'none', arch))
-
-    # No abi / arch, but requires our implementation:
-    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
-    # Tagged specifically as being cross-version compatible
-    # (with just the major version specified)
-    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
-
-    # No abi / arch, generic Python
-    for i, version in enumerate(versions):
-        supported.append(('py%s' % (version,), 'none', 'any'))
-        if i == 0:
-            supported.append(('py%s' % (version[0]), 'none', 'any'))
-
-    return supported
-
-
-implementation_tag = get_impl_tag()
index fc3a5975ef8d2d453b2439d379b757258e5b9998..8b17b0816e47dcebbacc32e2608a7420cac63e9e 100644 (file)
@@ -1,10 +1,13 @@
 """Basic http server for tests to simulate PyPI or custom indexes
 """
 
+import os
 import time
 import threading
 
 from setuptools.extern.six.moves import BaseHTTPServer, SimpleHTTPServer
+from setuptools.extern.six.moves.urllib_parse import urljoin
+from setuptools.extern.six.moves.urllib.request import pathname2url
 
 
 class IndexServer(BaseHTTPServer.HTTPServer):
@@ -69,6 +72,20 @@ class MockServer(BaseHTTPServer.HTTPServer, threading.Thread):
     def run(self):
         self.serve_forever()
 
+    @property
+    def netloc(self):
+        return 'localhost:%s' % self.server_port
+
     @property
     def url(self):
-        return 'http://localhost:%(server_port)s/' % vars(self)
+        return 'http://%s/' % self.netloc
+
+
+def path_to_url(path, authority=None):
+    """ Convert a path to a file: URL. """
+    path = os.path.normpath(os.path.abspath(path))
+    base = 'file:'
+    if authority is not None:
+        base += '//' + authority
+    url = urljoin(base, pathname2url(path))
+    return url
index c3fd1c6ef427678e0f75841926a0c4ac9f2e3423..aa75899a061222eff19eb331e827f86c947cd5fc 100644 (file)
@@ -15,24 +15,24 @@ import distutils.errors
 import io
 import zipfile
 import mock
-from setuptools.command.easy_install import (
-    EasyInstallDeprecationWarning, ScriptWriter, WindowsScriptWriter,
-)
 import time
+
 from setuptools.extern import six
-from setuptools.extern.six.moves import urllib
 
 import pytest
 
 from setuptools import sandbox
 from setuptools.sandbox import run_setup
 import setuptools.command.easy_install as ei
-from setuptools.command.easy_install import PthDistributions
+from setuptools.command.easy_install import (
+    EasyInstallDeprecationWarning, ScriptWriter, PthDistributions,
+    WindowsScriptWriter,
+)
 from setuptools.command import easy_install as easy_install_pkg
 from setuptools.dist import Distribution
 from pkg_resources import normalize_path, working_set
 from pkg_resources import Distribution as PRDistribution
-import setuptools.tests.server
+from setuptools.tests.server import MockServer, path_to_url
 from setuptools.tests import fail_on_ascii
 import pkg_resources
 
@@ -440,35 +440,40 @@ def distutils_package():
         yield
 
 
+@pytest.fixture
+def mock_index():
+    # set up a server which will simulate an alternate package index.
+    p_index = MockServer()
+    if p_index.server_port == 0:
+        # Some platforms (Jython) don't find a port to which to bind,
+        # so skip test for them.
+        pytest.skip("could not find a valid port")
+    p_index.start()
+    return p_index
+
+
 class TestDistutilsPackage:
     def test_bdist_egg_available_on_distutils_pkg(self, distutils_package):
         run_setup('setup.py', ['bdist_egg'])
 
 
 class TestSetupRequires:
-    def test_setup_requires_honors_fetch_params(self):
+
+    def test_setup_requires_honors_fetch_params(self, mock_index, monkeypatch):
         """
         When easy_install installs a source distribution which specifies
         setup_requires, it should honor the fetch parameters (such as
-        allow-hosts, index-url, and find-links).
+        index-url, and find-links).
         """
-        # set up a server which will simulate an alternate package index.
-        p_index = setuptools.tests.server.MockServer()
-        p_index.start()
-        netloc = 1
-        p_index_loc = urllib.parse.urlparse(p_index.url)[netloc]
-        if p_index_loc.endswith(':0'):
-            # Some platforms (Jython) don't find a port to which to bind,
-            #  so skip this test for them.
-            return
+        monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
+        monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
         with contexts.quiet():
             # create an sdist that has a build-time dependency.
             with TestSetupRequires.create_sdist() as dist_file:
                 with contexts.tempdir() as temp_install_dir:
                     with contexts.environment(PYTHONPATH=temp_install_dir):
                         ei_params = [
-                            '--index-url', p_index.url,
-                            '--allow-hosts', p_index_loc,
+                            '--index-url', mock_index.url,
                             '--exclude-scripts',
                             '--install-dir', temp_install_dir,
                             dist_file,
@@ -478,10 +483,8 @@ class TestSetupRequires:
                             # fail because it doesn't exist.
                             with pytest.raises(SystemExit):
                                 easy_install_pkg.main(ei_params)
-        # there should have been two or three requests to the server
-        #  (three happens on Python 3.3a)
-        assert 2 <= len(p_index.requests) <= 3
-        assert p_index.requests[0].path == '/does-not-exist/'
+        # there should have been one request to the server
+        assert [r.path for r in mock_index.requests] == ['/does-not-exist/']
 
     @staticmethod
     @contextlib.contextmanager
@@ -500,7 +503,9 @@ class TestSetupRequires:
                         version="1.0",
                         setup_requires = ['does-not-exist'],
                     )
-                """))])
+                """)),
+                ('setup.cfg', ''),
+            ])
             yield dist_path
 
     use_setup_cfg = (
@@ -632,6 +637,113 @@ class TestSetupRequires:
                 assert len(lines) > 0
                 assert lines[-1].strip() == '42'
 
+    def test_setup_requires_honors_pip_env(self, mock_index, monkeypatch):
+        monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
+        monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
+        monkeypatch.setenv(str('PIP_INDEX_URL'), mock_index.url)
+        with contexts.save_pkg_resources_state():
+            with contexts.tempdir() as temp_dir:
+                test_pkg = create_setup_requires_package(
+                    temp_dir, 'python-xlib', '0.19',
+                    setup_attrs=dict(dependency_links=[]))
+                test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
+                with open(test_setup_cfg, 'w') as fp:
+                    fp.write(DALS(
+                        '''
+                        [easy_install]
+                        index_url = https://pypi.org/legacy/
+                        '''))
+                test_setup_py = os.path.join(test_pkg, 'setup.py')
+                with pytest.raises(distutils.errors.DistutilsError):
+                    run_setup(test_setup_py, [str('--version')])
+        assert len(mock_index.requests) == 1
+        assert mock_index.requests[0].path == '/python-xlib/'
+
+    def test_setup_requires_with_pep508_url(self, mock_index, monkeypatch):
+        monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
+        monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
+        monkeypatch.setenv(str('PIP_INDEX_URL'), mock_index.url)
+        with contexts.save_pkg_resources_state():
+            with contexts.tempdir() as temp_dir:
+                dep_sdist = os.path.join(temp_dir, 'dep.tar.gz')
+                make_trivial_sdist(dep_sdist, 'dependency', '42')
+                dep_url = path_to_url(dep_sdist, authority='localhost')
+                test_pkg = create_setup_requires_package(
+                    temp_dir,
+                    'python-xlib', '0.19', # Ignored (overridden by setup_attrs).
+                    setup_attrs=dict(setup_requires='dependency @ %s' % dep_url))
+                test_setup_py = os.path.join(test_pkg, 'setup.py')
+                run_setup(test_setup_py, [str('--version')])
+        assert len(mock_index.requests) == 0
+
+    def test_setup_requires_with_allow_hosts(self, mock_index):
+        ''' The `allow-hosts` option is not supported anymore. '''
+        with contexts.save_pkg_resources_state():
+            with contexts.tempdir() as temp_dir:
+                test_pkg = os.path.join(temp_dir, 'test_pkg')
+                test_setup_py = os.path.join(test_pkg, 'setup.py')
+                test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
+                os.mkdir(test_pkg)
+                with open(test_setup_py, 'w') as fp:
+                    fp.write(DALS(
+                        '''
+                        from setuptools import setup
+                        setup(setup_requires='python-xlib')
+                        '''))
+                with open(test_setup_cfg, 'w') as fp:
+                    fp.write(DALS(
+                        '''
+                        [easy_install]
+                        allow_hosts = *
+                        '''))
+                with pytest.raises(distutils.errors.DistutilsError):
+                    run_setup(test_setup_py, [str('--version')])
+        assert len(mock_index.requests) == 0
+
+    def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir):
+        ''' Check `python_requires` is honored. '''
+        monkeypatch.setenv(str('PIP_RETRIES'), str('0'))
+        monkeypatch.setenv(str('PIP_TIMEOUT'), str('0'))
+        monkeypatch.setenv(str('PIP_NO_INDEX'), str('1'))
+        monkeypatch.setenv(str('PIP_VERBOSE'), str('1'))
+        dep_1_0_sdist = 'dep-1.0.tar.gz'
+        dep_1_0_url = path_to_url(str(tmpdir / dep_1_0_sdist))
+        dep_1_0_python_requires = '>=2.7'
+        make_python_requires_sdist(str(tmpdir / dep_1_0_sdist), 'dep', '1.0', dep_1_0_python_requires)
+        dep_2_0_sdist = 'dep-2.0.tar.gz'
+        dep_2_0_url = path_to_url(str(tmpdir / dep_2_0_sdist))
+        dep_2_0_python_requires = '!=' + '.'.join(map(str, sys.version_info[:2])) + '.*'
+        make_python_requires_sdist(str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires)
+        index = tmpdir / 'index.html'
+        index.write_text(DALS(
+            '''
+            <!DOCTYPE html>
+            <html><head><title>Links for dep</title></head>
+            <body>
+                <h1>Links for dep</h1>
+                <a href="{dep_1_0_url}" data-requires-python="{dep_1_0_python_requires}">{dep_1_0_sdist}</a><br/>
+                <a href="{dep_2_0_url}" data-requires-python="{dep_2_0_python_requires}">{dep_2_0_sdist}</a><br/>
+            </body>
+            </html>
+            ''').format(
+                dep_1_0_url=dep_1_0_url,
+                dep_1_0_sdist=dep_1_0_sdist,
+                dep_1_0_python_requires=dep_1_0_python_requires,
+                dep_2_0_url=dep_2_0_url,
+                dep_2_0_sdist=dep_2_0_sdist,
+                dep_2_0_python_requires=dep_2_0_python_requires,
+            ), 'utf-8')
+        index_url = path_to_url(str(index))
+        with contexts.save_pkg_resources_state():
+            test_pkg = create_setup_requires_package(
+                str(tmpdir),
+                'python-xlib', '0.19', # Ignored (overridden by setup_attrs).
+                setup_attrs=dict(setup_requires='dep', dependency_links=[index_url]))
+            test_setup_py = os.path.join(test_pkg, 'setup.py')
+            run_setup(test_setup_py, [str('--version')])
+        eggs = list(map(str, pkg_resources.find_distributions(os.path.join(test_pkg, '.eggs'))))
+        assert eggs == ['dep 1.0']
+
 
 def make_trivial_sdist(dist_path, distname, version):
     """
@@ -647,7 +759,9 @@ def make_trivial_sdist(dist_path, distname, version):
                  name=%r,
                  version=%r
              )
-         """ % (distname, version)))])
+         """ % (distname, version))),
+        ('setup.cfg', ''),
+    ])
 
 
 def make_nspkg_sdist(dist_path, distname, version):
@@ -683,12 +797,29 @@ def make_nspkg_sdist(dist_path, distname, version):
     make_sdist(dist_path, files)
 
 
+def make_python_requires_sdist(dist_path, distname, version, python_requires):
+    make_sdist(dist_path, [
+        ('setup.py', DALS("""\
+                          import setuptools
+                          setuptools.setup(
+                              name={name!r},
+                              version={version!r},
+                              python_requires={python_requires!r},
+                          )
+                          """).format(name=distname, version=version,
+                                      python_requires=python_requires)),
+         ('setup.cfg', ''),
+    ])
+
+
 def make_sdist(dist_path, files):
     """
     Create a simple sdist tarball at dist_path, containing the files
     listed in ``files`` as ``(filename, content)`` tuples.
     """
 
+    # Distributions with only one file don't play well with pip.
+    assert len(files) > 1
     with tarfile.open(dist_path, 'w:gz') as dist:
         for filename, content in files:
             file_bytes = io.BytesIO(content.encode('utf-8'))
@@ -721,8 +852,8 @@ def create_setup_requires_package(path, distname='foobar', version='0.1',
     test_pkg = os.path.join(path, 'test_pkg')
     os.mkdir(test_pkg)
 
+    # setup.cfg
     if use_setup_cfg:
-        test_setup_cfg = os.path.join(test_pkg, 'setup.cfg')
         options = []
         metadata = []
         for name in use_setup_cfg:
@@ -734,8 +865,7 @@ def create_setup_requires_package(path, distname='foobar', version='0.1',
             if isinstance(value, (tuple, list)):
                 value = ';'.join(value)
             section.append('%s: %s' % (name, value))
-        with open(test_setup_cfg, 'w') as f:
-            f.write(DALS(
+        test_setup_cfg_contents = DALS(
                 """
                 [metadata]
                 {metadata}
@@ -745,16 +875,19 @@ def create_setup_requires_package(path, distname='foobar', version='0.1',
             ).format(
                 options='\n'.join(options),
                 metadata='\n'.join(metadata),
-            ))
-
-    test_setup_py = os.path.join(test_pkg, 'setup.py')
+            )
+    else:
+        test_setup_cfg_contents = ''
+    with open(os.path.join(test_pkg, 'setup.cfg'), 'w') as f:
+        f.write(test_setup_cfg_contents)
 
+    # setup.py
     if setup_py_template is None:
         setup_py_template = DALS("""\
             import setuptools
             setuptools.setup(**%r)
         """)
-    with open(test_setup_py, 'w') as f:
+    with open(os.path.join(test_pkg, 'setup.py'), 'w') as f:
         f.write(setup_py_template % test_setup_attrs)
 
     foobar_path = os.path.join(path, '%s-%s.tar.gz' % (distname, version))
index 316eb2eddeb87d62b758046418541aa87999e522..0db204baa3c0b40c17f3988ba8c594a533a92aa8 100644 (file)
@@ -524,27 +524,27 @@ class TestEggInfo:
                               [metadata]
                               license_file = LICENSE
                               """),
-            'LICENSE': DALS("Test license")
+            'LICENSE': "Test license"
         }, True), # with license
         ({
             'setup.cfg': DALS("""
                               [metadata]
                               license_file = INVALID_LICENSE
                               """),
-            'LICENSE': DALS("Test license")
+            'LICENSE': "Test license"
         }, False), # with an invalid license
         ({
             'setup.cfg': DALS("""
                               """),
-            'LICENSE': DALS("Test license")
+            'LICENSE': "Test license"
         }, False), # no license_file attribute
         ({
             'setup.cfg': DALS("""
                               [metadata]
                               license_file = LICENSE
                               """),
-            'MANIFEST.in': DALS("exclude LICENSE"),
-            'LICENSE': DALS("Test license")
+            'MANIFEST.in': "exclude LICENSE",
+            'LICENSE': "Test license"
         }, False) # license file is manually excluded
     ])
     def test_setup_cfg_license_file(
@@ -567,6 +567,204 @@ class TestEggInfo:
             assert 'LICENSE' not in sources_text
             assert 'INVALID_LICENSE' not in sources_text # for invalid license test
 
+    @pytest.mark.parametrize("files, incl_licenses, excl_licenses", [
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_files =
+                                  LICENSE-ABC
+                                  LICENSE-XYZ
+                              """),
+            'LICENSE-ABC': "ABC license",
+            'LICENSE-XYZ': "XYZ license"
+        }, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with licenses
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_files = LICENSE-ABC, LICENSE-XYZ
+                              """),
+            'LICENSE-ABC': "ABC license",
+            'LICENSE-XYZ': "XYZ license"
+        }, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with commas
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_files =
+                                  LICENSE-ABC
+                              """),
+            'LICENSE-ABC': "ABC license",
+            'LICENSE-XYZ': "XYZ license"
+        }, ['LICENSE-ABC'], ['LICENSE-XYZ']), # with one license
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_files =
+                              """),
+            'LICENSE-ABC': "ABC license",
+            'LICENSE-XYZ': "XYZ license"
+        }, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # empty
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_files = LICENSE-XYZ
+                              """),
+            'LICENSE-ABC': "ABC license",
+            'LICENSE-XYZ': "XYZ license"
+        }, ['LICENSE-XYZ'], ['LICENSE-ABC']), # on same line
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_files =
+                                  LICENSE-ABC
+                                  INVALID_LICENSE
+                              """),
+            'LICENSE-ABC': "Test license"
+        }, ['LICENSE-ABC'], ['INVALID_LICENSE']), # with an invalid license
+        ({
+            'setup.cfg': DALS("""
+                              """),
+            'LICENSE': "Test license"
+        }, [], ['LICENSE']), # no license_files attribute
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_files = LICENSE
+                              """),
+            'MANIFEST.in': "exclude LICENSE",
+            'LICENSE': "Test license"
+        }, [], ['LICENSE']), # license file is manually excluded
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_files =
+                                  LICENSE-ABC
+                                  LICENSE-XYZ
+                              """),
+            'MANIFEST.in': "exclude LICENSE-XYZ",
+            'LICENSE-ABC': "ABC license",
+            'LICENSE-XYZ': "XYZ license"
+        }, ['LICENSE-ABC'], ['LICENSE-XYZ']) # subset is manually excluded
+    ])
+    def test_setup_cfg_license_files(
+            self, tmpdir_cwd, env, files, incl_licenses, excl_licenses):
+        self._create_project()
+        build_files(files)
+
+        environment.run_setup_py(
+            cmd=['egg_info'],
+            pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)])
+        )
+        egg_info_dir = os.path.join('.', 'foo.egg-info')
+
+        with open(os.path.join(egg_info_dir, 'SOURCES.txt')) as sources_file:
+            sources_lines = list(line.strip() for line in sources_file)
+
+        for lf in incl_licenses:
+            assert sources_lines.count(lf) == 1
+
+        for lf in excl_licenses:
+            assert sources_lines.count(lf) == 0
+
+    @pytest.mark.parametrize("files, incl_licenses, excl_licenses", [
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_file =
+                              license_files =
+                              """),
+            'LICENSE-ABC': "ABC license",
+            'LICENSE-XYZ': "XYZ license"
+        }, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # both empty
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_file =
+                                  LICENSE-ABC
+                                  LICENSE-XYZ
+                              """),
+            'LICENSE-ABC': "ABC license",
+            'LICENSE-XYZ': "XYZ license"
+        }, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # license_file is still singular
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_file = LICENSE-ABC
+                              license_files =
+                                  LICENSE-XYZ
+                                  LICENSE-PQR
+                              """),
+            'LICENSE-ABC': "ABC license",
+            'LICENSE-PQR': "PQR license",
+            'LICENSE-XYZ': "XYZ license"
+        }, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []), # combined
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_file = LICENSE-ABC
+                              license_files =
+                                  LICENSE-ABC
+                                  LICENSE-XYZ
+                                  LICENSE-PQR
+                              """),
+            'LICENSE-ABC': "ABC license",
+            'LICENSE-PQR': "PQR license",
+            'LICENSE-XYZ': "XYZ license"
+        }, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []), # duplicate license
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_file = LICENSE-ABC
+                              license_files =
+                                  LICENSE-XYZ
+                              """),
+            'LICENSE-ABC': "ABC license",
+            'LICENSE-PQR': "PQR license",
+            'LICENSE-XYZ': "XYZ license"
+        }, ['LICENSE-ABC', 'LICENSE-XYZ'], ['LICENSE-PQR']), # combined subset
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_file = LICENSE-ABC
+                              license_files =
+                                  LICENSE-XYZ
+                                  LICENSE-PQR
+                              """),
+            'LICENSE-PQR': "Test license"
+        }, ['LICENSE-PQR'], ['LICENSE-ABC', 'LICENSE-XYZ']), # with invalid licenses
+        ({
+            'setup.cfg': DALS("""
+                              [metadata]
+                              license_file = LICENSE-ABC
+                              license_files =
+                                LICENSE-PQR
+                                LICENSE-XYZ
+                              """),
+            'MANIFEST.in': "exclude LICENSE-ABC\nexclude LICENSE-PQR",
+            'LICENSE-ABC': "ABC license",
+            'LICENSE-PQR': "PQR license",
+            'LICENSE-XYZ': "XYZ license"
+        }, ['LICENSE-XYZ'], ['LICENSE-ABC', 'LICENSE-PQR']) # manually excluded
+    ])
+    def test_setup_cfg_license_file_license_files(
+            self, tmpdir_cwd, env, files, incl_licenses, excl_licenses):
+        self._create_project()
+        build_files(files)
+
+        environment.run_setup_py(
+            cmd=['egg_info'],
+            pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)])
+        )
+        egg_info_dir = os.path.join('.', 'foo.egg-info')
+
+        with open(os.path.join(egg_info_dir, 'SOURCES.txt')) as sources_file:
+            sources_lines = list(line.strip() for line in sources_file)
+
+        for lf in incl_licenses:
+            assert sources_lines.count(lf) == 1
+
+        for lf in excl_licenses:
+            assert sources_lines.count(lf) == 0
+
     def test_long_description_content_type(self, tmpdir_cwd, env):
         # Test that specifying a `long_description_content_type` keyword arg to
         # the `setup` function results in writing a `Description-Content-Type`
diff --git a/setuptools/tests/test_glibc.py b/setuptools/tests/test_glibc.py
deleted file mode 100644 (file)
index 795fdc5..0000000
+++ /dev/null
@@ -1,52 +0,0 @@
-import warnings
-
-import pytest
-
-from setuptools.glibc import check_glibc_version
-
-__metaclass__ = type
-
-
-@pytest.fixture(params=[
-    "2.20",
-    # used by "linaro glibc", see gh-3588
-    "2.20-2014.11",
-    # weird possibilities that I just made up
-    "2.20+dev",
-    "2.20-custom",
-    "2.20.1",
-    ])
-def two_twenty(request):
-    return request.param
-
-
-@pytest.fixture(params=["asdf", "", "foo.bar"])
-def bad_string(request):
-    return request.param
-
-
-class TestGlibc:
-    def test_manylinux1_check_glibc_version(self, two_twenty):
-        """
-        Test that the check_glibc_version function is robust against weird
-        glibc version strings.
-        """
-        assert check_glibc_version(two_twenty, 2, 15)
-        assert check_glibc_version(two_twenty, 2, 20)
-        assert not check_glibc_version(two_twenty, 2, 21)
-        assert not check_glibc_version(two_twenty, 3, 15)
-        assert not check_glibc_version(two_twenty, 1, 15)
-
-    def test_bad_versions(self, bad_string):
-        """
-        For unparseable strings, warn and return False
-        """
-        with warnings.catch_warnings(record=True) as ws:
-            warnings.filterwarnings("always")
-            assert not check_glibc_version(bad_string, 2, 5)
-            for w in ws:
-                if "Expected glibc version with" in str(w.message):
-                    break
-            else:
-                # Didn't find the warning we were expecting
-                assert False
index 1c0b2b18bbd7e59b2cfeb153c9ec8c1fba2f862b..f1a27f8be8cb29a1cf290c4f5fb6c9c48ec139f9 100644 (file)
@@ -64,7 +64,7 @@ def install_context(request, tmpdir, monkeypatch):
     monkeypatch.setattr('site.USER_BASE', user_base.strpath)
     monkeypatch.setattr('site.USER_SITE', user_site.strpath)
     monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath])
-    monkeypatch.setenv('PYTHONPATH', os.path.pathsep.join(sys.path))
+    monkeypatch.setenv(str('PYTHONPATH'), str(os.path.pathsep.join(sys.path)))
 
     # Set up the command for performing the installation.
     dist = Distribution()
diff --git a/setuptools/tests/test_pep425tags.py b/setuptools/tests/test_pep425tags.py
deleted file mode 100644 (file)
index 30afdec..0000000
+++ /dev/null
@@ -1,170 +0,0 @@
-import sys
-
-import pytest
-from mock import patch
-
-from setuptools import pep425tags
-
-__metaclass__ = type
-
-
-class TestPEP425Tags:
-
-    def mock_get_config_var(self, **kwd):
-        """
-        Patch sysconfig.get_config_var for arbitrary keys.
-        """
-        get_config_var = pep425tags.sysconfig.get_config_var
-
-        def _mock_get_config_var(var):
-            if var in kwd:
-                return kwd[var]
-            return get_config_var(var)
-        return _mock_get_config_var
-
-    def abi_tag_unicode(self, flags, config_vars):
-        """
-        Used to test ABI tags, verify correct use of the `u` flag
-        """
-        config_vars.update({'SOABI': None})
-        base = pep425tags.get_abbr_impl() + pep425tags.get_impl_ver()
-
-        if sys.version_info < (3, 3):
-            config_vars.update({'Py_UNICODE_SIZE': 2})
-            mock_gcf = self.mock_get_config_var(**config_vars)
-            with patch(
-                    'setuptools.pep425tags.sysconfig.get_config_var',
-                    mock_gcf):
-                abi_tag = pep425tags.get_abi_tag()
-                assert abi_tag == base + flags
-
-            config_vars.update({'Py_UNICODE_SIZE': 4})
-            mock_gcf = self.mock_get_config_var(**config_vars)
-            with patch('setuptools.pep425tags.sysconfig.get_config_var',
-                       mock_gcf):
-                abi_tag = pep425tags.get_abi_tag()
-                assert abi_tag == base + flags + 'u'
-
-        else:
-            # On Python >= 3.3, UCS-4 is essentially permanently enabled, and
-            # Py_UNICODE_SIZE is None. SOABI on these builds does not include
-            # the 'u' so manual SOABI detection should not do so either.
-            config_vars.update({'Py_UNICODE_SIZE': None})
-            mock_gcf = self.mock_get_config_var(**config_vars)
-            with patch('setuptools.pep425tags.sysconfig.get_config_var',
-                       mock_gcf):
-                abi_tag = pep425tags.get_abi_tag()
-                assert abi_tag == base + flags
-
-    def test_broken_sysconfig(self):
-        """
-        Test that pep425tags still works when sysconfig is broken.
-        Can be a problem on Python 2.7
-        Issue #1074.
-        """
-        def raises_ioerror(var):
-            raise IOError("I have the wrong path!")
-
-        with patch('setuptools.pep425tags.sysconfig.get_config_var',
-                   raises_ioerror):
-            with pytest.warns(RuntimeWarning):
-                assert len(pep425tags.get_supported())
-
-    def test_no_hyphen_tag(self):
-        """
-        Test that no tag contains a hyphen.
-        """
-        mock_gcf = self.mock_get_config_var(SOABI='cpython-35m-darwin')
-
-        with patch('setuptools.pep425tags.sysconfig.get_config_var',
-                   mock_gcf):
-            supported = pep425tags.get_supported()
-
-        for (py, abi, plat) in supported:
-            assert '-' not in py
-            assert '-' not in abi
-            assert '-' not in plat
-
-    def test_manual_abi_noflags(self):
-        """
-        Test that no flags are set on a non-PyDebug, non-Pymalloc ABI tag.
-        """
-        self.abi_tag_unicode('', {'Py_DEBUG': False, 'WITH_PYMALLOC': False})
-
-    def test_manual_abi_d_flag(self):
-        """
-        Test that the `d` flag is set on a PyDebug, non-Pymalloc ABI tag.
-        """
-        self.abi_tag_unicode('d', {'Py_DEBUG': True, 'WITH_PYMALLOC': False})
-
-    def test_manual_abi_m_flag(self):
-        """
-        Test that the `m` flag is set on a non-PyDebug, Pymalloc ABI tag.
-        """
-        self.abi_tag_unicode('m', {'Py_DEBUG': False, 'WITH_PYMALLOC': True})
-
-    def test_manual_abi_dm_flags(self):
-        """
-        Test that the `dm` flags are set on a PyDebug, Pymalloc ABI tag.
-        """
-        self.abi_tag_unicode('dm', {'Py_DEBUG': True, 'WITH_PYMALLOC': True})
-
-
-class TestManylinux1Tags:
-
-    @patch('setuptools.pep425tags.get_platform', lambda: 'linux_x86_64')
-    @patch('setuptools.glibc.have_compatible_glibc',
-           lambda major, minor: True)
-    def test_manylinux1_compatible_on_linux_x86_64(self):
-        """
-        Test that manylinux1 is enabled on linux_x86_64
-        """
-        assert pep425tags.is_manylinux1_compatible()
-
-    @patch('setuptools.pep425tags.get_platform', lambda: 'linux_i686')
-    @patch('setuptools.glibc.have_compatible_glibc',
-           lambda major, minor: True)
-    def test_manylinux1_compatible_on_linux_i686(self):
-        """
-        Test that manylinux1 is enabled on linux_i686
-        """
-        assert pep425tags.is_manylinux1_compatible()
-
-    @patch('setuptools.pep425tags.get_platform', lambda: 'linux_x86_64')
-    @patch('setuptools.glibc.have_compatible_glibc',
-           lambda major, minor: False)
-    def test_manylinux1_2(self):
-        """
-        Test that manylinux1 is disabled with incompatible glibc
-        """
-        assert not pep425tags.is_manylinux1_compatible()
-
-    @patch('setuptools.pep425tags.get_platform', lambda: 'arm6vl')
-    @patch('setuptools.glibc.have_compatible_glibc',
-           lambda major, minor: True)
-    def test_manylinux1_3(self):
-        """
-        Test that manylinux1 is disabled on arm6vl
-        """
-        assert not pep425tags.is_manylinux1_compatible()
-
-    @patch('setuptools.pep425tags.get_platform', lambda: 'linux_x86_64')
-    @patch('setuptools.glibc.have_compatible_glibc',
-           lambda major, minor: True)
-    @patch('sys.platform', 'linux2')
-    def test_manylinux1_tag_is_first(self):
-        """
-        Test that the more specific tag manylinux1 comes first.
-        """
-        groups = {}
-        for pyimpl, abi, arch in pep425tags.get_supported():
-            groups.setdefault((pyimpl, abi), []).append(arch)
-
-        for arches in groups.values():
-            if arches == ['any']:
-                continue
-            # Expect the most specific arch first:
-            if len(arches) == 3:
-                assert arches == ['manylinux1_x86_64', 'linux_x86_64', 'any']
-            else:
-                assert arches == ['manylinux1_x86_64', 'linux_x86_64']
index 96114595db04b13b675a5d4596f71eca617fa859..986058067bed4a9104a2481845a841eae760d53d 100644 (file)
@@ -1,43 +1,22 @@
-import mock
-from distutils import log
-
-import pytest
-
 from setuptools.command.register import register
 from setuptools.dist import Distribution
+from setuptools.errors import RemovedCommandError
 
+try:
+    from unittest import mock
+except ImportError:
+    import mock
 
-class TestRegisterTest:
-    def test_warns_deprecation(self):
-        dist = Distribution()
-
-        cmd = register(dist)
-        cmd.run_command = mock.Mock()
-        cmd.send_metadata = mock.Mock()
-        cmd.announce = mock.Mock()
-
-        cmd.run()
+import pytest
 
-        cmd.announce.assert_called_with(
-            "WARNING: Registering is deprecated, use twine to upload instead "
-            "(https://pypi.org/p/twine/)",
-            log.WARN
-        )
 
-    def test_warns_deprecation_when_raising(self):
+class TestRegister:
+    def test_register_exception(self):
+        """Ensure that the register command has been properly removed."""
         dist = Distribution()
+        dist.dist_files = [(mock.Mock(), mock.Mock(), mock.Mock())]
 
         cmd = register(dist)
-        cmd.run_command = mock.Mock()
-        cmd.send_metadata = mock.Mock()
-        cmd.send_metadata.side_effect = Exception
-        cmd.announce = mock.Mock()
 
-        with pytest.raises(Exception):
+        with pytest.raises(RemovedCommandError):
             cmd.run()
-
-        cmd.announce.assert_called_with(
-            "WARNING: Registering is deprecated, use twine to upload instead "
-            "(https://pypi.org/p/twine/)",
-            log.WARN
-        )
index 320c6959da2756e578819e7fa379cc6c1fa49a78..7586cb262d4787193ab78d012ba0f75f1adc2e1c 100644 (file)
-import mock
-import os
-import re
-
-from distutils import log
-from distutils.errors import DistutilsError
-
-import pytest
-
 from setuptools.command.upload import upload
 from setuptools.dist import Distribution
-from setuptools.extern import six
-
-
-def _parse_upload_body(body):
-    boundary = u'\r\n----------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
-    entries = []
-    name_re = re.compile(u'^Content-Disposition: form-data; name="([^\"]+)"')
-
-    for entry in body.split(boundary):
-        pair = entry.split(u'\r\n\r\n')
-        if not len(pair) == 2:
-            continue
-
-        key, value = map(six.text_type.strip, pair)
-        m = name_re.match(key)
-        if m is not None:
-            key = m.group(1)
-
-        entries.append((key, value))
-
-    return entries
-
-
-@pytest.fixture
-def patched_upload(tmpdir):
-    class Fix:
-        def __init__(self, cmd, urlopen):
-            self.cmd = cmd
-            self.urlopen = urlopen
-
-        def __iter__(self):
-            return iter((self.cmd, self.urlopen))
-
-        def get_uploaded_metadata(self):
-            request = self.urlopen.call_args_list[0][0][0]
-            body = request.data.decode('utf-8')
-            entries = dict(_parse_upload_body(body))
-
-            return entries
+from setuptools.errors import RemovedCommandError
 
-    class ResponseMock(mock.Mock):
-        def getheader(self, name, default=None):
-            """Mocked getheader method for response object"""
-            return {
-                'content-type': 'text/plain; charset=utf-8',
-            }.get(name.lower(), default)
+try:
+    from unittest import mock
+except ImportError:
+    import mock
 
-    with mock.patch('setuptools.command.upload.urlopen') as urlopen:
-        urlopen.return_value = ResponseMock()
-        urlopen.return_value.getcode.return_value = 200
-        urlopen.return_value.read.return_value = b''
-
-        content = os.path.join(str(tmpdir), "content_data")
-
-        with open(content, 'w') as f:
-            f.write("Some content")
-
-        dist = Distribution()
-        dist.dist_files = [('sdist', '3.7.0', content)]
-
-        cmd = upload(dist)
-        cmd.announce = mock.Mock()
-        cmd.username = 'user'
-        cmd.password = 'hunter2'
-
-        yield Fix(cmd, urlopen)
-
-
-class TestUploadTest:
-    def test_upload_metadata(self, patched_upload):
-        cmd, patch = patched_upload
-
-        # Set the metadata version to 2.1
-        cmd.distribution.metadata.metadata_version = '2.1'
-
-        # Run the command
-        cmd.ensure_finalized()
-        cmd.run()
-
-        # Make sure we did the upload
-        patch.assert_called_once()
-
-        # Make sure the metadata version is correct in the headers
-        entries = patched_upload.get_uploaded_metadata()
-        assert entries['metadata_version'] == '2.1'
-
-    def test_warns_deprecation(self):
-        dist = Distribution()
-        dist.dist_files = [(mock.Mock(), mock.Mock(), mock.Mock())]
-
-        cmd = upload(dist)
-        cmd.upload_file = mock.Mock()
-        cmd.announce = mock.Mock()
-
-        cmd.run()
+import pytest
 
-        cmd.announce.assert_called_once_with(
-            "WARNING: Uploading via this command is deprecated, use twine to "
-            "upload instead (https://pypi.org/p/twine/)",
-            log.WARN
-        )
 
-    def test_warns_deprecation_when_raising(self):
+class TestUpload:
+    def test_upload_exception(self):
+        """Ensure that the upload command has been properly removed."""
         dist = Distribution()
         dist.dist_files = [(mock.Mock(), mock.Mock(), mock.Mock())]
 
         cmd = upload(dist)
-        cmd.upload_file = mock.Mock()
-        cmd.upload_file.side_effect = Exception
-        cmd.announce = mock.Mock()
-
-        with pytest.raises(Exception):
-            cmd.run()
-
-        cmd.announce.assert_called_once_with(
-            "WARNING: Uploading via this command is deprecated, use twine to "
-            "upload instead (https://pypi.org/p/twine/)",
-            log.WARN
-        )
-
-    @pytest.mark.parametrize('url', [
-        'https://example.com/a;parameter',    # Has parameters
-        'https://example.com/a?query',        # Has query
-        'https://example.com/a#fragment',     # Has fragment
-        'ftp://example.com',                  # Invalid scheme
-
-    ])
-    def test_upload_file_invalid_url(self, url, patched_upload):
-        patched_upload.urlopen.side_effect = Exception("Should not be reached")
-
-        cmd = patched_upload.cmd
-        cmd.repository = url
-
-        cmd.ensure_finalized()
-        with pytest.raises(AssertionError):
-            cmd.run()
-
-    def test_upload_file_http_error(self, patched_upload):
-        patched_upload.urlopen.side_effect = six.moves.urllib.error.HTTPError(
-            'https://example.com',
-            404,
-            'File not found',
-            None,
-            None
-        )
-
-        cmd = patched_upload.cmd
-        cmd.ensure_finalized()
 
-        with pytest.raises(DistutilsError):
+        with pytest.raises(RemovedCommandError):
             cmd.run()
-
-        cmd.announce.assert_any_call(
-            'Upload failed (404): File not found',
-            log.ERROR)
-
-    def test_upload_file_os_error(self, patched_upload):
-        patched_upload.urlopen.side_effect = OSError("Invalid")
-
-        cmd = patched_upload.cmd
-        cmd.ensure_finalized()
-
-        with pytest.raises(OSError):
-            cmd.run()
-
-        cmd.announce.assert_any_call('Invalid', log.ERROR)
-
-    @mock.patch('setuptools.command.upload.spawn')
-    def test_upload_file_gpg(self, spawn, patched_upload):
-        cmd, urlopen = patched_upload
-
-        cmd.sign = True
-        cmd.identity = "Alice"
-        cmd.dry_run = True
-        content_fname = cmd.distribution.dist_files[0][2]
-        signed_file = content_fname + '.asc'
-
-        with open(signed_file, 'wb') as f:
-            f.write("signed-data".encode('utf-8'))
-
-        cmd.ensure_finalized()
-        cmd.run()
-
-        # Make sure that GPG was called
-        spawn.assert_called_once_with([
-            "gpg", "--detach-sign", "--local-user", "Alice", "-a",
-            content_fname
-        ], dry_run=True)
-
-        # Read the 'signed' data that was transmitted
-        entries = patched_upload.get_uploaded_metadata()
-        assert entries['gpg_signature'] == 'signed-data'
-
-    def test_show_response_no_error(self, patched_upload):
-        # This test is just that show_response doesn't throw an error
-        # It is not really important what the printed response looks like
-        # in a deprecated command, but we don't want to introduce new
-        # errors when importing this function from distutils
-
-        patched_upload.cmd.show_response = True
-        patched_upload.cmd.ensure_finalized()
-        patched_upload.cmd.run()
index 74a1284ce715d34ae7cfe1183ea2b068820f69da..cd3d9313c325e465707c0a2ebfa5df39db967ac7 100644 (file)
@@ -121,14 +121,12 @@ def test_pip_upgrade_from_source(pip_version, virtualenv):
     virtualenv.run('pip install --no-cache-dir --upgrade ' + sdist)
 
 
-def test_test_command_install_requirements(bare_virtualenv, tmpdir):
+def _check_test_command_install_requirements(virtualenv, tmpdir):
     """
     Check the test command will install all required dependencies.
     """
-    bare_virtualenv.run(' && '.join((
-        'cd {source}',
-        'python setup.py develop',
-    )).format(source=SOURCE_DIR))
+    # Install setuptools.
+    virtualenv.run('python setup.py develop', cd=SOURCE_DIR)
 
     def sdist(distname, version):
         dist_path = tmpdir.join('%s-%s.tar.gz' % (distname, version))
@@ -179,12 +177,20 @@ def test_test_command_install_requirements(bare_virtualenv, tmpdir):
             open('success', 'w').close()
             '''))
     # Run test command for test package.
-    bare_virtualenv.run(' && '.join((
+    virtualenv.run(' && '.join((
         'cd {tmpdir}',
         'python setup.py test -s test',
     )).format(tmpdir=tmpdir))
     assert tmpdir.join('success').check()
 
+def test_test_command_install_requirements(virtualenv, tmpdir):
+    # Ensure pip/wheel packages are installed.
+    virtualenv.run("python -c \"__import__('pkg_resources').require(['pip', 'wheel'])\"")
+    _check_test_command_install_requirements(virtualenv, tmpdir)
+
+def test_test_command_install_requirements_when_using_easy_install(bare_virtualenv, tmpdir):
+    _check_test_command_install_requirements(bare_virtualenv, tmpdir)
+
 
 def test_no_missing_dependencies(bare_virtualenv):
     """
index e85a4a7e8dab4a51e286296e59d1e5d3b132505e..d50816c22ad2fc5f4ea4c2f4c3b304263fa0b5bf 100644 (file)
@@ -450,6 +450,34 @@ WHEEL_INSTALL_TESTS = (
         }),
     ),
 
+    dict(
+        id='empty_namespace_package',
+        file_defs={
+            'foobar': {
+                '__init__.py': "__import__('pkg_resources').declare_namespace(__name__)",
+            },
+        },
+        setup_kwargs=dict(
+            namespace_packages=['foobar'],
+            packages=['foobar'],
+        ),
+        install_tree=flatten_tree({
+            'foo-1.0-py{py_version}.egg': [
+                'foo-1.0-py{py_version}-nspkg.pth',
+                {'EGG-INFO': [
+                    'PKG-INFO',
+                    'RECORD',
+                    'WHEEL',
+                    'namespace_packages.txt',
+                    'top_level.txt',
+                ]},
+                {'foobar': [
+                    '__init__.py',
+                ]},
+            ]
+        }),
+    ),
+
     dict(
         id='data_in_package',
         file_defs={
index e11f0a1d912860b0da241dee65f6f76d9c8d45e9..3effd79b3f968203a118b6bae5f99c7ebeffbbe2 100644 (file)
@@ -1,6 +1,7 @@
 """Wheels support."""
 
 from distutils.util import get_platform
+from distutils import log
 import email
 import itertools
 import os
@@ -11,9 +12,9 @@ import zipfile
 import pkg_resources
 import setuptools
 from pkg_resources import parse_version
+from setuptools.extern.packaging.tags import sys_tags
 from setuptools.extern.packaging.utils import canonicalize_name
 from setuptools.extern.six import PY3
-from setuptools import pep425tags
 from setuptools.command.egg_info import write_requirements
 
 
@@ -76,7 +77,7 @@ class Wheel:
 
     def is_compatible(self):
         '''Is the wheel is compatible with the current platform?'''
-        supported_tags = pep425tags.get_supported()
+        supported_tags = set(map(str, sys_tags()))
         return next((True for t in self.tags() if t in supported_tags), False)
 
     def egg_name(self):
@@ -162,11 +163,17 @@ class Wheel:
                 extras_require=extras_require,
             ),
         )
-        write_requirements(
-            setup_dist.get_command_obj('egg_info'),
-            None,
-            os.path.join(egg_info, 'requires.txt'),
-        )
+        # Temporarily disable info traces.
+        log_threshold = log._global_log.threshold
+        log.set_threshold(log.WARN)
+        try:
+            write_requirements(
+                setup_dist.get_command_obj('egg_info'),
+                None,
+                os.path.join(egg_info, 'requires.txt'),
+            )
+        finally:
+            log.set_threshold(log_threshold)
 
     @staticmethod
     def _move_data_entries(destination_eggdir, dist_data):
@@ -206,6 +213,8 @@ class Wheel:
             for mod in namespace_packages:
                 mod_dir = os.path.join(destination_eggdir, *mod.split('.'))
                 mod_init = os.path.join(mod_dir, '__init__.py')
-                if os.path.exists(mod_dir) and not os.path.exists(mod_init):
+                if not os.path.exists(mod_dir):
+                    os.mkdir(mod_dir)
+                if not os.path.exists(mod_init):
                     with open(mod_init, 'w') as fp:
                         fp.write(NAMESPACE_PACKAGE_INIT)
index cb3e67269609b11d7b4e3a81ee679d86b4e0e6f0..1f8bd19d8c1fddf6404aad690a52ed2afa7b5347 100644 (file)
@@ -9,4 +9,4 @@ coverage>=4.5.1
 pytest-cov>=2.5.1
 paver; python_version>="3.6"
 futures; python_version=="2.7"
-pip==18.1  # Temporary workaround for #1644.
+pip>=19.1 # For proper file:// URLs support.
diff --git a/tools/tox_pip.py b/tools/tox_pip.py
new file mode 100644 (file)
index 0000000..5aeca80
--- /dev/null
@@ -0,0 +1,38 @@
+import os
+import shutil
+import subprocess
+import sys
+from glob import glob
+
+VIRTUAL_ENV = os.environ['VIRTUAL_ENV']
+TOX_PIP_DIR = os.path.join(VIRTUAL_ENV, 'pip')
+
+
+def pip(args):
+    # First things first, get a recent (stable) version of pip.
+    if not os.path.exists(TOX_PIP_DIR):
+        subprocess.check_call([sys.executable, '-m', 'pip',
+                               '--disable-pip-version-check',
+                               'install', '-t', TOX_PIP_DIR,
+                               'pip'])
+        shutil.rmtree(glob(os.path.join(TOX_PIP_DIR, 'pip-*.dist-info'))[0])
+    # And use that version.
+    pypath = os.environ.get('PYTHONPATH')
+    pypath = pypath.split(os.pathsep) if pypath is not None else []
+    pypath.insert(0, TOX_PIP_DIR)
+    os.environ['PYTHONPATH'] = os.pathsep.join(pypath)
+    # Disable PEP 517 support when using editable installs.
+    for n, a in enumerate(args):
+        if not a.startswith('-'):
+            if a in 'install' and '-e' in args[n:]:
+                args.insert(n + 1, '--no-use-pep517')
+            break
+    # Fix call for setuptools editable install.
+    for n, a in enumerate(args):
+        if a == '.':
+            args[n] = os.getcwd()
+    subprocess.check_call([sys.executable, '-m', 'pip'] + args, cwd=TOX_PIP_DIR)
+
+
+if __name__ == '__main__':
+    pip(sys.argv[1:])
diff --git a/tox.ini b/tox.ini
index e0eef95a457ed9e3f576da6c71eccdd89bd8df32..5d439cb34bce97a2edb523251c2557b2f4a6ee49 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -7,14 +7,16 @@
 [tox]
 envlist=python
 
+[helpers]
+# Wrapper for calls to pip that makes sure the version being used is
+# up-to-date, and prevents the current working directory from being
+# added to `sys.path`.
+pip = python {toxinidir}/tools/tox_pip.py
+
 [testenv]
-deps=-rtests/requirements.txt
-# Changed from default (`python -m pip ...`)
-# to prevent the current working directory
-# from being added to `sys.path`.
-install_command=python -c 'import sys; sys.path.remove(""); from pkg_resources import load_entry_point; load_entry_point("pip", "console_scripts", "pip")()' install {opts} {packages}
-# Same as above.
-list_dependencies_command={envbindir}/pip freeze --all
+deps=-r{toxinidir}/tests/requirements.txt
+install_command = {[helpers]pip} install {opts} {packages}
+list_dependencies_command = {[helpers]pip} freeze --all
 setenv=COVERAGE_FILE={toxworkdir}/.coverage.{envname}
 # TODO: The passed environment variables came from copying other tox.ini files
 # These should probably be individually annotated to explain what needs them.