Imported Upstream version 0.42.0 upstream/0.42.0
authorTizenOpenSource <tizenopensrc@samsung.com>
Tue, 6 Feb 2024 04:20:05 +0000 (13:20 +0900)
committerTizenOpenSource <tizenopensrc@samsung.com>
Tue, 6 Feb 2024 04:20:05 +0000 (13:20 +0900)
98 files changed:
LICENSE.txt [new file with mode: 0644]
PKG-INFO [new file with mode: 0644]
README.rst [new file with mode: 0644]
docs/Makefile [new file with mode: 0644]
docs/conf.py [new file with mode: 0644]
docs/development.rst [new file with mode: 0644]
docs/index.rst [new file with mode: 0644]
docs/installing.rst [new file with mode: 0644]
docs/make.bat [new file with mode: 0644]
docs/manpages/wheel.rst [new file with mode: 0644]
docs/news.rst [new file with mode: 0644]
docs/quickstart.rst [new file with mode: 0644]
docs/reference/index.rst [new file with mode: 0644]
docs/reference/wheel_convert.rst [new file with mode: 0644]
docs/reference/wheel_pack.rst [new file with mode: 0644]
docs/reference/wheel_tags.rst [new file with mode: 0644]
docs/reference/wheel_unpack.rst [new file with mode: 0644]
docs/story.rst [new file with mode: 0644]
docs/user_guide.rst [new file with mode: 0644]
pyproject.toml [new file with mode: 0644]
setup.py [new file with mode: 0644]
src/wheel/__init__.py [new file with mode: 0644]
src/wheel/__main__.py [new file with mode: 0644]
src/wheel/_setuptools_logging.py [new file with mode: 0644]
src/wheel/bdist_wheel.py [new file with mode: 0644]
src/wheel/cli/__init__.py [new file with mode: 0644]
src/wheel/cli/convert.py [new file with mode: 0644]
src/wheel/cli/pack.py [new file with mode: 0644]
src/wheel/cli/tags.py [new file with mode: 0644]
src/wheel/cli/unpack.py [new file with mode: 0644]
src/wheel/macosx_libfile.py [new file with mode: 0644]
src/wheel/metadata.py [new file with mode: 0644]
src/wheel/util.py [new file with mode: 0644]
src/wheel/vendored/__init__.py [new file with mode: 0644]
src/wheel/vendored/packaging/__init__.py [new file with mode: 0644]
src/wheel/vendored/packaging/_elffile.py [new file with mode: 0644]
src/wheel/vendored/packaging/_manylinux.py [new file with mode: 0644]
src/wheel/vendored/packaging/_musllinux.py [new file with mode: 0644]
src/wheel/vendored/packaging/_parser.py [new file with mode: 0644]
src/wheel/vendored/packaging/_structures.py [new file with mode: 0644]
src/wheel/vendored/packaging/_tokenizer.py [new file with mode: 0644]
src/wheel/vendored/packaging/markers.py [new file with mode: 0644]
src/wheel/vendored/packaging/requirements.py [new file with mode: 0644]
src/wheel/vendored/packaging/specifiers.py [new file with mode: 0644]
src/wheel/vendored/packaging/tags.py [new file with mode: 0644]
src/wheel/vendored/packaging/utils.py [new file with mode: 0644]
src/wheel/vendored/packaging/version.py [new file with mode: 0644]
src/wheel/vendored/vendor.txt [new file with mode: 0644]
src/wheel/wheelfile.py [new file with mode: 0644]
tests/cli/eggnames.txt [new file with mode: 0644]
tests/cli/test_convert.py [new file with mode: 0644]
tests/cli/test_pack.py [new file with mode: 0644]
tests/cli/test_tags.py [new file with mode: 0644]
tests/cli/test_unpack.py [new file with mode: 0644]
tests/conftest.py [new file with mode: 0644]
tests/test_bdist_wheel.py [new file with mode: 0644]
tests/test_macosx_libfile.py [new file with mode: 0644]
tests/test_metadata.py [new file with mode: 0644]
tests/test_sdist.py [new file with mode: 0644]
tests/test_tagopt.py [new file with mode: 0644]
tests/test_wheelfile.py [new file with mode: 0644]
tests/testdata/abi3extension.dist/extension.c [new file with mode: 0644]
tests/testdata/abi3extension.dist/setup.cfg [new file with mode: 0644]
tests/testdata/abi3extension.dist/setup.py [new file with mode: 0644]
tests/testdata/commasinfilenames.dist/mypackage/__init__.py [new file with mode: 0644]
tests/testdata/commasinfilenames.dist/mypackage/data/1,2,3.txt [new file with mode: 0644]
tests/testdata/commasinfilenames.dist/mypackage/data/__init__.py [new file with mode: 0644]
tests/testdata/commasinfilenames.dist/setup.py [new file with mode: 0644]
tests/testdata/commasinfilenames.dist/testrepo-0.1.0/mypackage/__init__.py [new file with mode: 0644]
tests/testdata/complex-dist/complexdist/__init__.py [new file with mode: 0644]
tests/testdata/complex-dist/setup.py [new file with mode: 0644]
tests/testdata/extension.dist/extension.c [new file with mode: 0644]
tests/testdata/extension.dist/setup.py [new file with mode: 0644]
tests/testdata/headers.dist/header.h [new file with mode: 0644]
tests/testdata/headers.dist/headersdist.py [new file with mode: 0644]
tests/testdata/headers.dist/setup.cfg [new file with mode: 0644]
tests/testdata/headers.dist/setup.py [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/libb.dylib [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib.c [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib_10_10.dylib [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib_10_10_10.dylib [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib_10_10_386.dylib [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib_10_10_fat.dylib [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib_10_14.dylib [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib_10_14_386.dylib [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib_10_14_fat.dylib [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib_10_6.dylib [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib_10_6_386.dylib [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib_10_6_fat.dylib [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib_10_9_universal2.dylib [new file with mode: 0755]
tests/testdata/macosx_minimal_system_version/test_lib_11.dylib [new file with mode: 0644]
tests/testdata/macosx_minimal_system_version/test_lib_multiple_fat.dylib [new file with mode: 0644]
tests/testdata/simple.dist/setup.py [new file with mode: 0644]
tests/testdata/simple.dist/simpledist/__init__.py [new file with mode: 0644]
tests/testdata/test-1.0-py2.py3-none-any.whl [new file with mode: 0644]
tests/testdata/unicode.dist/setup.py [new file with mode: 0644]
tests/testdata/unicode.dist/unicodedist/__init__.py [new file with mode: 0644]
tests/testdata/unicode.dist/unicodedist/åäö_日本語.py [new file with mode: 0644]

diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644 (file)
index 0000000..a31470f
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2012 Daniel Holth <dholth@fastmail.fm> and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/PKG-INFO b/PKG-INFO
new file mode 100644 (file)
index 0000000..d73e495
--- /dev/null
+++ b/PKG-INFO
@@ -0,0 +1,62 @@
+Metadata-Version: 2.1
+Name: wheel
+Version: 0.42.0
+Summary: A built-package format for Python
+Keywords: wheel,packaging
+Author-email: Daniel Holth <dholth@fastmail.fm>
+Maintainer-email: Alex Grönholm <alex.gronholm@nextday.fi>
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Dist: pytest >= 6.0.0 ; extra == "test"
+Requires-Dist: setuptools >= 65 ; extra == "test"
+Project-URL: Changelog, https://wheel.readthedocs.io/en/stable/news.html
+Project-URL: Documentation, https://wheel.readthedocs.io/
+Project-URL: Issue Tracker, https://github.com/pypa/wheel/issues
+Project-URL: Source, https://github.com/pypa/wheel
+Provides-Extra: test
+
+wheel
+=====
+
+This library is the reference implementation of the Python wheel packaging
+standard, as defined in `PEP 427`_.
+
+It has two different roles:
+
+#. A setuptools_ extension for building wheels that provides the
+   ``bdist_wheel`` setuptools command
+#. A command line tool for working with wheel files
+
+It should be noted that wheel is **not** intended to be used as a library, and
+as such there is no stable, public API.
+
+.. _PEP 427: https://www.python.org/dev/peps/pep-0427/
+.. _setuptools: https://pypi.org/project/setuptools/
+
+Documentation
+-------------
+
+The documentation_ can be found on Read The Docs.
+
+.. _documentation: https://wheel.readthedocs.io/
+
+Code of Conduct
+---------------
+
+Everyone interacting in the wheel project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
+
+.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
+
diff --git a/README.rst b/README.rst
new file mode 100644 (file)
index 0000000..1cf194e
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,32 @@
+wheel
+=====
+
+This library is the reference implementation of the Python wheel packaging
+standard, as defined in `PEP 427`_.
+
+It has two different roles:
+
+#. A setuptools_ extension for building wheels that provides the
+   ``bdist_wheel`` setuptools command
+#. A command line tool for working with wheel files
+
+It should be noted that wheel is **not** intended to be used as a library, and
+as such there is no stable, public API.
+
+.. _PEP 427: https://www.python.org/dev/peps/pep-0427/
+.. _setuptools: https://pypi.org/project/setuptools/
+
+Documentation
+-------------
+
+The documentation_ can be found on Read The Docs.
+
+.. _documentation: https://wheel.readthedocs.io/
+
+Code of Conduct
+---------------
+
+Everyone interacting in the wheel project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
+
+.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644 (file)
index 0000000..54b44f9
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,153 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = _build
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+       @echo "Please use \`make <target>' where <target> is one of"
+       @echo "  html       to make standalone HTML files"
+       @echo "  dirhtml    to make HTML files named index.html in directories"
+       @echo "  singlehtml to make a single large HTML file"
+       @echo "  pickle     to make pickle files"
+       @echo "  json       to make JSON files"
+       @echo "  htmlhelp   to make HTML files and a HTML help project"
+       @echo "  qthelp     to make HTML files and a qthelp project"
+       @echo "  devhelp    to make HTML files and a Devhelp project"
+       @echo "  epub       to make an epub"
+       @echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+       @echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+       @echo "  text       to make text files"
+       @echo "  man        to make manual pages"
+       @echo "  texinfo    to make Texinfo files"
+       @echo "  info       to make Texinfo files and run them through makeinfo"
+       @echo "  gettext    to make PO message catalogs"
+       @echo "  changes    to make an overview of all changed/added/deprecated items"
+       @echo "  linkcheck  to check all external links for integrity"
+       @echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+       -rm -rf $(BUILDDIR)/*
+
+html:
+       $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+       @echo
+       @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+       $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+       @echo
+       @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+       $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+       @echo
+       @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+       $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+       @echo
+       @echo "Build finished; now you can process the pickle files."
+
+json:
+       $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+       @echo
+       @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+       $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+       @echo
+       @echo "Build finished; now you can run HTML Help Workshop with the" \
+             ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+       $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+       @echo
+       @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+             ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+       @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/wheel.qhcp"
+       @echo "To view the help file:"
+       @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/wheel.qhc"
+
+devhelp:
+       $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+       @echo
+       @echo "Build finished."
+       @echo "To view the help file:"
+       @echo "# mkdir -p $$HOME/.local/share/devhelp/wheel"
+       @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/wheel"
+       @echo "# devhelp"
+
+epub:
+       $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+       @echo
+       @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+       $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+       @echo
+       @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+       @echo "Run \`make' in that directory to run these through (pdf)latex" \
+             "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+       $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+       @echo "Running LaTeX files through pdflatex..."
+       $(MAKE) -C $(BUILDDIR)/latex all-pdf
+       @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+       $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+       @echo
+       @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+       $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+       @echo
+       @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+       $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+       @echo
+       @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+       @echo "Run \`make' in that directory to run these through makeinfo" \
+             "(use \`make info' here to do that automatically)."
+
+info:
+       $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+       @echo "Running Texinfo files through makeinfo..."
+       make -C $(BUILDDIR)/texinfo info
+       @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+       $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+       @echo
+       @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+       $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+       @echo
+       @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+       $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+       @echo
+       @echo "Link check complete; look for any errors in the above output " \
+             "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+       $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+       @echo "Testing of doctests in the sources finished, look at the " \
+             "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644 (file)
index 0000000..d6b0a3f
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,254 @@
+#
+# wheel documentation build configuration file, created by
+# sphinx-quickstart on Thu Jul 12 00:14:09 2012.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+from __future__ import annotations
+
+import os
+import re
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+# sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ["sphinx.ext.intersphinx"]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# The suffix of source filenames.
+source_suffix = ".rst"
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+project = "wheel"
+copyright = "2012, Daniel Holth"
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+here = os.path.abspath(os.path.dirname(__file__))
+with open(
+    os.path.join(here, "..", "src", "wheel", "__init__.py"), encoding="utf8"
+) as version_file:
+    match = re.search(r'__version__ = "((\d+\.\d+\.\d+).*)"', version_file.read())
+    # The short X.Y version.
+    version = match.group(2)
+
+    # The full version, including alpha/beta/rc tags.
+    release = match.group(1)
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+# language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ["_build"]
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+highlight_language = "bash"
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+intersphinx_mapping = {"python": ("https://docs.python.org/", None)}
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = "default"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+# html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+# html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "wheeldoc"
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    # 'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    # 'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    # 'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+    ("index", "wheel.tex", "wheel Documentation", "Daniel Holth", "manual"),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    ("manpages/wheel", "wheel", "wheel Documentation", ["Daniel Holth"], 1),
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+    (
+        "index",
+        "wheel",
+        "wheel Documentation",
+        "Daniel Holth",
+        "wheel",
+        "One line description of project.",
+        "Miscellaneous",
+    ),
+]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
diff --git a/docs/development.rst b/docs/development.rst
new file mode 100644 (file)
index 0000000..75b3a79
--- /dev/null
+++ b/docs/development.rst
@@ -0,0 +1,96 @@
+Development
+===========
+
+Pull Requests
+-------------
+
+- Submit Pull Requests against the ``main`` branch.
+- Provide a good description of what you're doing and why.
+- Provide tests that cover your changes and try to run the tests locally first.
+
+**Example**. Assuming you set up GitHub account, forked wheel repository from
+https://github.com/pypa/wheel to your own page via web interface, and your
+fork is located at https://github.com/yourname/wheel
+
+::
+
+  $ git clone git@github.com:pypa/wheel.git
+  $ cd wheel
+  # ...
+  $ git diff
+  $ git add <modified> ...
+  $ git status
+  $ git commit
+
+You may reference relevant issues in commit messages (like #1259) to
+make GitHub link issues and commits together, and with phrase like
+"fixes #1259" you can even close relevant issues automatically. Now
+push the changes to your fork::
+
+  $ git push git@github.com:yourname/wheel.git
+
+Open Pull Requests page at https://github.com/yourname/wheel/pulls and
+click "New pull request". That's it.
+
+Automated Testing
+-----------------
+
+All pull requests and merges to ``main`` branch are tested in `GitHub Actions`_
+based on the workflows in the ``.github`` directory.
+
+The only way to trigger the test suite to run again for a pull request is to
+submit another change to the pull branch.
+
+.. _GitHub Actions: https://github.com/features/actions
+
+Running Tests Locally
+---------------------
+
+Python requirements: tox_ or pytest_
+
+To run the tests via tox against all matching interpreters::
+
+  $ tox
+
+To run the tests via tox against a specific environment::
+
+  $ tox -e py35
+
+Alternatively, you can run the tests via pytest using your default interpreter::
+
+  $ pip install -e .[test]  # Installs the test dependencies
+  $ pytest                  # Runs the tests with the current interpreter
+
+The above pip install command will replace the current interpreter's installed
+wheel package with the development package being tested. If you use this
+workflow, it is recommended to run it under a virtualenv_.
+
+.. _tox: https://pypi.org/project/tox/
+.. _pytest: https://pypi.org/project/pytest/
+.. _virtualenv: https://pypi.org/project/virtualenv/
+
+Getting Involved
+----------------
+
+The wheel project welcomes help in the following ways:
+
+- Making Pull Requests for code, tests, or docs.
+- Commenting on open issues and pull requests.
+- Helping to answer questions on the `mailing list`_.
+
+.. _`mailing list`: https://mail.python.org/mailman/listinfo/distutils-sig
+
+Release Process
+---------------
+
+To make a new release:
+
+#. Edit ``docs/news.rst`` and replace ``**UNRELEASED**`` with a release version
+   and date, like ``**X.Y.Z (20XX-YY-ZZ)**``.
+#. Replace the ``__version__`` attribute in ``src/wheel/__init__.py`` with the
+   same version number as above (without the date of course).
+#. Create a new git tag matching the version exactly
+#. Push the new tag to GitHub
+
+Pushing a new tag to GitHub will trigger the publish workflow which packages the
+project and publishes the resulting artifacts to PyPI.
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644 (file)
index 0000000..5811e9f
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,31 @@
+wheel
+=====
+
+`User list <http://groups.google.com/group/python-virtualenv>`_ |
+`Dev list <https://mail.python.org/archives/list/distutils-sig@python.org/>`_ |
+`GitHub <https://github.com/pypa/wheel>`_ |
+`PyPI <https://pypi.org/pypi/wheel/>`_ |
+User IRC: #pypa |
+Dev IRC: #pypa-dev
+
+This library is the reference implementation of the Python wheel packaging
+standard, as defined in `PEP 427`_.
+
+It has two different roles:
+
+#. A setuptools_ extension for building wheels that provides the
+   ``bdist_wheel`` setuptools command
+#. A command line tool for working with wheel files
+
+.. _PEP 427: https://www.python.org/dev/peps/pep-0427/
+.. _setuptools: https://pypi.org/project/setuptools/
+
+.. toctree::
+   :maxdepth: 2
+
+   quickstart
+   installing
+   user_guide
+   reference/index
+   development
+   news
diff --git a/docs/installing.rst b/docs/installing.rst
new file mode 100644 (file)
index 0000000..2f94efa
--- /dev/null
+++ b/docs/installing.rst
@@ -0,0 +1,24 @@
+Installation
+============
+
+You can use pip_ to install wheel::
+
+    pip install wheel
+
+If you do not have pip_ installed, see its documentation for
+`installation instructions`_.
+
+If you prefer using your system package manager to install Python packages, you
+can typically find the wheel package under one of the following package names:
+
+* python-wheel
+* python3-wheel
+
+.. _pip: https://pip.pypa.io/en/stable/
+.. _installation instructions: https://pip.pypa.io/en/stable/installing/
+
+Python and OS Compatibility
+---------------------------
+
+wheel should work on any Python implementation and operating system and is
+compatible with Python version 3.7 and upwards.
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100644 (file)
index 0000000..8083236
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,190 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+       set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+       set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+       set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+       :help
+       echo.Please use `make ^<target^>` where ^<target^> is one of
+       echo.  html       to make standalone HTML files
+       echo.  dirhtml    to make HTML files named index.html in directories
+       echo.  singlehtml to make a single large HTML file
+       echo.  pickle     to make pickle files
+       echo.  json       to make JSON files
+       echo.  htmlhelp   to make HTML files and a HTML help project
+       echo.  qthelp     to make HTML files and a qthelp project
+       echo.  devhelp    to make HTML files and a Devhelp project
+       echo.  epub       to make an epub
+       echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+       echo.  text       to make text files
+       echo.  man        to make manual pages
+       echo.  texinfo    to make Texinfo files
+       echo.  gettext    to make PO message catalogs
+       echo.  changes    to make an overview over all changed/added/deprecated items
+       echo.  linkcheck  to check all external links for integrity
+       echo.  doctest    to run all doctests embedded in the documentation if enabled
+       goto end
+)
+
+if "%1" == "clean" (
+       for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+       del /q /s %BUILDDIR%\*
+       goto end
+)
+
+if "%1" == "html" (
+       %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+       goto end
+)
+
+if "%1" == "dirhtml" (
+       %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+       goto end
+)
+
+if "%1" == "singlehtml" (
+       %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+       goto end
+)
+
+if "%1" == "pickle" (
+       %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished; now you can process the pickle files.
+       goto end
+)
+
+if "%1" == "json" (
+       %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished; now you can process the JSON files.
+       goto end
+)
+
+if "%1" == "htmlhelp" (
+       %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+       goto end
+)
+
+if "%1" == "qthelp" (
+       %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+       echo.^> qcollectiongenerator %BUILDDIR%\qthelp\wheel.qhcp
+       echo.To view the help file:
+       echo.^> assistant -collectionFile %BUILDDIR%\qthelp\wheel.qhc
+       goto end
+)
+
+if "%1" == "devhelp" (
+       %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished.
+       goto end
+)
+
+if "%1" == "epub" (
+       %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The epub file is in %BUILDDIR%/epub.
+       goto end
+)
+
+if "%1" == "latex" (
+       %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+       goto end
+)
+
+if "%1" == "text" (
+       %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The text files are in %BUILDDIR%/text.
+       goto end
+)
+
+if "%1" == "man" (
+       %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The manual pages are in %BUILDDIR%/man.
+       goto end
+)
+
+if "%1" == "texinfo" (
+       %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+       goto end
+)
+
+if "%1" == "gettext" (
+       %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+       goto end
+)
+
+if "%1" == "changes" (
+       %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.The overview file is in %BUILDDIR%/changes.
+       goto end
+)
+
+if "%1" == "linkcheck" (
+       %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+       goto end
+)
+
+if "%1" == "doctest" (
+       %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+       if errorlevel 1 exit /b 1
+       echo.
+       echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+       goto end
+)
+
+:end
diff --git a/docs/manpages/wheel.rst b/docs/manpages/wheel.rst
new file mode 100644 (file)
index 0000000..df1ac2a
--- /dev/null
@@ -0,0 +1,46 @@
+:orphan:
+
+wheel manual page
+=================
+
+Synopsis
+--------
+
+**wheel** [*command*] [*options*]
+
+
+Description
+-----------
+
+:program:`wheel` installs and operates on `PEP 427`_ format binary wheels.
+
+
+Commands
+--------
+  ``unpack``
+    Unpack wheel
+
+  ``pack``
+    Repack a previously unpacked wheel
+
+  ``convert``
+    Convert egg or wininst to wheel
+
+  ``tags``
+    Change the tags on a wheel file
+
+  ``version``
+    Print version and exit
+
+  ``help``
+    Show this help
+
+Try ``wheel <command> --help`` for more information.
+
+
+Options
+-------
+  -h, --help            show this help message and exit
+
+
+.. _`PEP 427`: https://www.python.org/dev/peps/pep-0427/
diff --git a/docs/news.rst b/docs/news.rst
new file mode 100644 (file)
index 0000000..c56faf1
--- /dev/null
@@ -0,0 +1,538 @@
+Release Notes
+=============
+
+**0.42.0 (2023-11-26)**
+
+- Allowed removing build tag with ``wheel tags --build ""``
+- Fixed ``wheel pack`` and ``wheel tags`` writing updated ``WHEEL`` fields after a
+  blank line, causing other tools to ignore them
+- Fixed ``wheel pack`` and ``wheel tags`` writing ``WHEEL`` with CRLF line endings or
+  a mix of CRLF and LF
+- Fixed ``wheel pack --build-number ""`` not removing build tag from ``WHEEL``
+  (above changes by Benjamin Gilbert)
+
+**0.41.3 (2023-10-30)**
+
+- Updated vendored ``packaging`` to 23.2
+- Fixed ABI tag generation for CPython 3.13a1 on Windows (PR by Sam Gross)
+
+**0.41.2 (2023-08-22)**
+
+- Fixed platform tag detection for GraalPy and 32-bit python running on an aarch64
+  kernel (PR by Matthieu Darbois)
+- Fixed ``wheel tags`` to not list directories in ``RECORD`` files
+  (PR by Mike Taves)
+- Fixed ABI tag generation for GraalPy (PR by Michael Simacek)
+
+**0.41.1 (2023-08-05)**
+
+- Fixed naming of the ``data_dir`` directory in the presence of local version segment
+  given via ``egg_info.tag_build`` (PR by Anderson Bravalheri)
+- Fixed version specifiers in ``Requires-Dist`` being wrapped in parentheses
+
+**0.41.0 (2023-07-22)**
+
+- Added full support of the build tag syntax to ``wheel tags`` (you can now set a build
+  tag like ``123mytag``)
+- Fixed warning on Python 3.12 about ``onerror`` deprecation. (PR by Henry Schreiner)
+- Support testing on Python 3.12 betas (PR by Ewout ter Hoeven)
+
+**0.40.0 (2023-03-14)**
+
+- Added a ``wheel tags`` command to modify tags on an existing wheel
+  (PR by Henry Schreiner)
+- Updated vendored ``packaging`` to 23.0
+- ``wheel unpack`` now preserves the executable attribute of extracted files
+- Fixed spaces in platform names not being converted to underscores (PR by David Tucker)
+- Fixed ``RECORD`` files in generated wheels missing the regular file attribute
+- Fixed ``DeprecationWarning`` about the use of the deprecated ``pkg_resources`` API
+  (PR by Thomas Grainger)
+- Wheel now uses flit-core as a build backend (PR by Henry Schreiner)
+
+**0.38.4 (2022-11-09)**
+
+- Fixed ``PKG-INFO`` conversion in ``bdist_wheel`` mangling UTF-8 header values in
+  ``METADATA`` (PR by Anderson Bravalheri)
+
+**0.38.3 (2022-11-08)**
+
+- Fixed install failure when used with ``--no-binary``, reported on Ubuntu 20.04, by
+  removing ``setup_requires`` from ``setup.cfg``
+
+**0.38.2 (2022-11-05)**
+
+- Fixed regression introduced in v0.38.1 which broke parsing of wheel file names with
+  multiple platform tags
+
+**0.38.1 (2022-11-04)**
+
+- Removed install dependency on setuptools
+- The future-proof fix in 0.36.0 for converting PyPy's SOABI into a abi tag was
+  faulty. Fixed so that future changes in the SOABI will not change the tag.
+
+**0.38.0 (2022-10-21)**
+
+- Dropped support for Python < 3.7
+- Updated vendored ``packaging`` to 21.3
+- Replaced all uses of ``distutils`` with ``setuptools``
+- The handling of ``license_files`` (including glob patterns and default
+  values) is now delegated to ``setuptools>=57.0.0`` (#466).
+  The package dependencies were updated to reflect this change.
+- Fixed potential DoS attack via the ``WHEEL_INFO_RE`` regular expression
+- Fixed ``ValueError: ZIP does not support timestamps before 1980`` when using
+  ``SOURCE_DATE_EPOCH=0`` or when on-disk timestamps are earlier than 1980-01-01. Such
+  timestamps are now changed to the minimum value before packaging.
+
+**0.37.1 (2021-12-22)**
+
+- Fixed ``wheel pack`` duplicating the ``WHEEL`` contents when the build number has
+  changed (#415)
+- Fixed parsing of file names containing commas in ``RECORD`` (PR by Hood Chatham)
+
+**0.37.0 (2021-08-09)**
+
+- Added official Python 3.10 support
+- Updated vendored ``packaging`` library to v20.9
+
+**0.36.2 (2020-12-13)**
+
+- Updated vendored ``packaging`` library to v20.8
+- Fixed wheel sdist missing ``LICENSE.txt``
+- Don't use default ``macos/arm64`` deployment target in calculating the
+  platform tag for fat binaries (PR by Ronald Oussoren)
+
+**0.36.1 (2020-12-04)**
+
+- Fixed ``AssertionError`` when ``MACOSX_DEPLOYMENT_TARGET`` was set to ``11``
+  (PR by Grzegorz Bokota and François-Xavier Coudert)
+- Fixed regression introduced in 0.36.0 on Python 2.7 when a custom generator
+  name was passed as unicode (Scikit-build)
+  (``TypeError: 'unicode' does not have the buffer interface``)
+
+**0.36.0 (2020-12-01)**
+
+- Added official Python 3.9 support
+- Updated vendored ``packaging`` library to v20.7
+- Switched to always using LF as line separator when generating ``WHEEL`` files
+  (on Windows, CRLF was being used instead)
+- The ABI tag is taken from  the sysconfig SOABI value. On PyPy the SOABI value
+  is ``pypy37-pp73`` which is not compliant with PEP 3149, as it should have
+  both the API tag and the platform tag. This change future-proofs any change
+  in PyPy's SOABI tag to make sure only the ABI tag is used by wheel.
+- Fixed regression and test for ``bdist_wheel --plat-name``. It was ignored for
+  C extensions in v0.35, but the regression was not detected by tests.
+
+**0.35.1 (2020-08-14)**
+
+- Replaced install dependency on ``packaging`` with a vendored copy of its
+  ``tags`` module
+- Fixed ``bdist_wheel`` not working on FreeBSD due to mismatching platform tag
+  name (it was not being converted to lowercase)
+
+**0.35.0 (2020-08-13)**
+
+- Switched to the packaging_ library for computing wheel tags
+- Fixed a resource leak in ``WheelFile.open()`` (PR by Jon Dufresne)
+
+.. _packaging: https://pypi.org/project/packaging/
+
+**0.34.2 (2020-01-30)**
+
+- Fixed installation of ``wheel`` from sdist on environments without Unicode
+  file name support
+
+**0.34.1 (2020-01-27)**
+
+- Fixed installation of ``wheel`` from sdist which was broken due to a chicken
+  and egg problem with PEP 517 and setuptools_scm
+
+**0.34.0 (2020-01-27)**
+
+- Dropped Python 3.4 support
+- Added automatic platform tag detection for macOS binary wheels
+  (PR by Grzegorz Bokota)
+- Added the ``--compression=`` option to the ``bdist_wheel`` command
+- Fixed PyPy tag generation to work with the updated semantics (#328)
+- Updated project packaging and testing configuration for :pep:`517`
+- Moved the contents of setup.py to setup.cfg
+- Fixed duplicate RECORD file when using ``wheel pack`` on Windows
+- Fixed bdist_wheel failing at cleanup on Windows with a read-only source tree
+- Fixed ``wheel pack`` not respecting the existing build tag in ``WHEEL``
+- Switched the project to use the "src" layout
+- Switched to setuptools_scm_ for versioning
+
+.. _setuptools_scm: https://github.com/pypa/setuptools_scm/
+
+**0.33.6 (2019-08-18)**
+
+- Fixed regression from 0.33.5 that broke building binary wheels against the
+  limited ABI
+- Fixed egg2wheel compatibility with the future release of Python 3.10
+  (PR by Anthony Sottile)
+
+**0.33.5 (2019-08-17)**
+
+- Don't add the ``m`` ABI flag to wheel names on Python 3.8 (PR by rdb)
+- Updated ``MANIFEST.in`` to include many previously omitted files in the sdist
+
+**0.33.4 (2019-05-12)**
+
+- Reverted PR #289 (adding directory entries to the wheel file) due to
+  incompatibility with ``distlib.wheel``
+
+**0.33.3 (2019-05-10)** (redacted release)
+
+- Fixed wheel build failures on some systems due to all attributes being
+  preserved (PR by Matt Wozniski)
+
+**0.33.2 (2019-05-08)** (redacted release)
+
+- Fixed empty directories missing from the wheel (PR by Jason R. Coombs)
+
+**0.33.1 (2019-02-19)**
+
+- Fixed the ``--build-number`` option for ``wheel pack`` not being applied
+
+**0.33.0 (2019-02-11)**
+
+- Added the ``--build-number`` option to the ``wheel pack`` command
+- Fixed bad shebangs sneaking into wheels
+- Fixed documentation issue with ``wheel pack`` erroneously being called
+  ``wheel repack``
+- Fixed filenames with "bad" characters (like commas) not being quoted in
+  ``RECORD`` (PR by Paul Moore)
+- Sort requirements extras to ensure deterministic builds
+  (PR by PoncinMatthieu)
+- Forced ``inplace = False`` when building a C extension for the wheel
+
+**0.32.3 (2018-11-18)**
+
+- Fixed compatibility with Python 2.7.0 – 2.7.3
+- Fixed handling of direct URL requirements with markers (PR by Benoit Pierre)
+
+**0.32.2 (2018-10-20)**
+
+- Fixed build number appearing in the ``.dist-info`` directory name
+- Made wheel file name parsing more permissive
+- Fixed wrong Python tag in wheels converted from eggs
+  (PR by John T. Wodder II)
+
+**0.32.1 (2018-10-03)**
+
+- Fixed ``AttributeError: 'Requirement' object has no attribute 'url'`` on
+  setuptools/pkg_resources versions older than 18.8 (PR by Benoit Pierre)
+- Fixed ``AttributeError: 'module' object has no attribute
+  'algorithms_available'`` on Python < 2.7.9 (PR by Benoit Pierre)
+- Fixed permissions on the generated ``.dist-info/RECORD`` file
+
+**0.32.0 (2018-09-29)**
+
+- Removed wheel signing and verifying features
+- Removed the "wheel install" and "wheel installscripts" commands
+- Added the ``wheel pack`` command
+- Allowed multiple license files to be specified using the ``license_files``
+  option
+- Deprecated the ``license_file`` option
+- Eliminated duplicate lines from generated requirements in
+  ``.dist-info/METADATA`` (thanks to Wim Glenn for the contribution)
+- Fixed handling of direct URL specifiers in requirements
+  (PR by Benoit Pierre)
+- Fixed canonicalization of extras (PR by Benoit Pierre)
+- Warn when the deprecated ``[wheel]`` section is used in ``setup.cfg``
+  (PR by Jon Dufresne)
+
+**0.31.1 (2018-05-13)**
+
+- Fixed arch as ``None`` when converting eggs to wheels
+
+**0.31.0 (2018-04-01)**
+
+- Fixed displaying of errors on Python 3
+- Fixed single digit versions in wheel files not being properly recognized
+- Fixed wrong character encodings being used (instead of UTF-8) to read and
+  write ``RECORD`` (this sometimes crashed bdist_wheel too)
+- Enabled Zip64 support in wheels by default
+- Metadata-Version is now 2.1
+- Dropped DESCRIPTION.rst and metadata.json from the list of generated files
+- Dropped support for the non-standard, undocumented ``provides-extra`` and
+  ``requires-dist`` keywords in setup.cfg metadata
+- Deprecated all wheel signing and signature verification commands
+- Removed the (already defunct) ``tool`` extras from setup.py
+
+**0.30.0 (2017-09-10)**
+
+- Added py-limited-api {cp32|cp33|cp34|...} flag to produce cpNN.abi3.{arch}
+  tags on CPython 3.
+- Documented the ``license_file`` metadata key
+- Improved Python, abi tagging for ``wheel convert``. Thanks Ales Erjavec.
+- Fixed ``>`` being prepended to lines starting with "From" in the long
+  description
+- Added support for specifying a build number (as per PEP 427).
+  Thanks Ian Cordasco.
+- Made the order of files in generated ZIP files deterministic.
+  Thanks Matthias Bach.
+- Made the order of requirements in metadata deterministic. Thanks Chris Lamb.
+- Fixed ``wheel install`` clobbering existing files
+- Improved the error message when trying to verify an unsigned wheel file
+- Removed support for Python 2.6, 3.2 and 3.3.
+
+**0.29.0 (2016-02-06)**
+
+- Fix compression type of files in archive (Issue #155, Pull Request #62,
+  thanks Xavier Fernandez)
+
+**0.28.0 (2016-02-05)**
+
+- Fix file modes in archive (Issue #154)
+
+**0.27.0 (2016-02-05)**
+
+- Support forcing a platform tag using ``--plat-name`` on pure-Python wheels,
+  as well as nonstandard platform tags on non-pure wheels (Pull Request #60,
+  Issue #144, thanks Andrés Díaz)
+- Add SOABI tags to platform-specific wheels built for Python 2.X (Pull Request
+  #55, Issue #63, Issue #101)
+- Support reproducible wheel files, wheels that can be rebuilt and will hash to
+  the same values as previous builds (Pull Request #52, Issue #143, thanks
+  Barry Warsaw)
+- Support for changes in keyring >= 8.0 (Pull Request #61, thanks Jason R.
+  Coombs)
+- Use the file context manager when checking if dependency_links.txt is empty,
+  fixes problems building wheels under PyPy on Windows  (Issue #150, thanks
+  Cosimo Lupo)
+- Don't attempt to (recursively) create a build directory ending with ``..``
+  (invalid on all platforms, but code was only executed on Windows) (Issue #91)
+- Added the PyPA Code of Conduct (Pull Request #56)
+
+**0.26.0 (2015-09-18)**
+
+- Fix multiple entrypoint comparison failure on Python 3 (Issue #148)
+
+**0.25.0 (2015-09-16)**
+
+- Add Python 3.5 to tox configuration
+- Deterministic (sorted) metadata
+- Fix tagging for Python 3.5 compatibility
+- Support py2-none-'arch' and py3-none-'arch' tags
+- Treat data-only wheels as pure
+- Write to temporary file and rename when using wheel install --force
+
+**0.24.0 (2014-07-06)**
+
+- The python tag used for pure-python packages is now .pyN (major version
+  only). This change actually occurred in 0.23.0 when the --python-tag
+  option was added, but was not explicitly mentioned in the changelog then.
+- wininst2wheel and egg2wheel removed. Use "wheel convert [archive]"
+  instead.
+- Wheel now supports setuptools style conditional requirements via the
+  extras_require={} syntax. Separate 'extra' names from conditions using
+  the : character. Wheel's own setup.py does this. (The empty-string
+  extra is the same as install_requires.) These conditional requirements
+  should work the same whether the package is installed by wheel or
+  by setup.py.
+
+**0.23.0 (2014-03-31)**
+
+- Compatibility tag flags added to the bdist_wheel command
+- sdist should include files necessary for tests
+- 'wheel convert' can now also convert unpacked eggs to wheel
+- Rename pydist.json to metadata.json to avoid stepping on the PEP
+- The --skip-scripts option has been removed, and not generating scripts is now
+  the default. The option was a temporary approach until installers could
+  generate scripts themselves. That is now the case with pip 1.5 and later.
+  Note that using pip 1.4 to install a wheel without scripts will leave the
+  installation without entry-point wrappers. The "wheel install-scripts"
+  command can be used to generate the scripts in such cases.
+- Thank you contributors
+
+**0.22.0 (2013-09-15)**
+
+- Include entry_points.txt, scripts a.k.a. commands, in experimental
+  pydist.json
+- Improved test_requires parsing
+- Python 2.6 fixes, "wheel version" command courtesy pombredanne
+
+**0.21.0 (2013-07-20)**
+
+- Pregenerated scripts are the default again.
+- "setup.py bdist_wheel --skip-scripts" turns them off.
+- setuptools is no longer a listed requirement for the 'wheel'
+  package. It is of course still required in order for bdist_wheel
+  to work.
+- "python -m wheel" avoids importing pkg_resources until it's necessary.
+
+**0.20.0**
+
+- No longer include console_scripts in wheels. Ordinary scripts (shell files,
+  standalone Python files) are included as usual.
+- Include new command "python -m wheel install-scripts [distribution
+  [distribution ...]]" to install the console_scripts (setuptools-style
+  scripts using pkg_resources) for a distribution.
+
+**0.19.0 (2013-07-19)**
+
+- pymeta.json becomes pydist.json
+
+**0.18.0 (2013-07-04)**
+
+- Python 3 Unicode improvements
+
+**0.17.0 (2013-06-23)**
+
+- Support latest PEP-426 "pymeta.json" (json-format metadata)
+
+**0.16.0 (2013-04-29)**
+
+- Python 2.6 compatibility bugfix (thanks John McFarlane)
+- Bugfix for C-extension tags for CPython 3.3 (using SOABI)
+- Bugfix for bdist_wininst converter "wheel convert"
+- Bugfix for dists where "is pure" is None instead of True or False
+- Python 3 fix for moving Unicode Description to metadata body
+- Include rudimentary API documentation in Sphinx (thanks Kevin Horn)
+
+**0.15.0 (2013-01-14)**
+
+- Various improvements
+
+**0.14.0 (2012-10-27)**
+
+- Changed the signature format to better comply with the current JWS spec.
+  Breaks all existing signatures.
+- Include ``wheel unsign`` command to remove RECORD.jws from an archive.
+- Put the description in the newly allowed payload section of PKG-INFO
+  (METADATA) files.
+
+**0.13.0 (2012-10-17)**
+
+- Use distutils instead of sysconfig to get installation paths; can install
+  headers.
+- Improve WheelFile() sort.
+- Allow bootstrap installs without any pkg_resources.
+
+**0.12.0 (2012-10-06)**
+
+- Unit test for wheel.tool.install
+
+**0.11.0 (2012-10-17)**
+
+- API cleanup
+
+**0.10.3 (2012-10-03)**
+
+- Scripts fixer fix
+
+**0.10.2 (2012-10-02)**
+
+- Fix keygen
+
+**0.10.1 (2012-09-30)**
+
+- Preserve attributes on install.
+
+**0.10.0 (2012-09-30)**
+
+- Include a copy of pkg_resources. Wheel can now install into a virtualenv
+  that does not have distribute (though most packages still require
+  pkg_resources to actually work; wheel install distribute)
+- Define a new setup.cfg section [wheel]. universal=1 will
+  apply the py2.py3-none-any tag for pure python wheels.
+
+**0.9.7 (2012-09-20)**
+
+- Only import dirspec when needed. dirspec is only needed to find the
+  configuration for keygen/signing operations.
+
+**0.9.6 (2012-09-19)**
+
+- requires-dist from setup.cfg overwrites any requirements from setup.py
+  Care must be taken that the requirements are the same in both cases,
+  or just always install from wheel.
+- drop dirspec requirement on win32
+- improved command line utility, adds 'wheel convert [egg or wininst]' to
+  convert legacy binary formats to wheel
+
+**0.9.5 (2012-09-15)**
+
+- Wheel's own wheel file can be executed by Python, and can install itself:
+  ``python wheel-0.9.5-py27-none-any/wheel install ...``
+- Use argparse; basic ``wheel install`` command should run with only stdlib
+  dependencies.
+- Allow requires_dist in setup.cfg's [metadata] section. In addition to
+  dependencies in setup.py, but will only be interpreted when installing
+  from wheel, not from sdist. Can be qualified with environment markers.
+
+**0.9.4 (2012-09-11)**
+
+- Fix wheel.signatures in sdist
+
+**0.9.3 (2012-09-10)**
+
+- Integrated digital signatures support without C extensions.
+- Integrated "wheel install" command (single package, no dependency
+  resolution) including compatibility check.
+- Support Python 3.3
+- Use Metadata 1.3 (PEP 426)
+
+**0.9.2 (2012-08-29)**
+
+- Automatic signing if WHEEL_TOOL points to the wheel binary
+- Even more Python 3 fixes
+
+**0.9.1 (2012-08-28)**
+
+- 'wheel sign' uses the keys generated by 'wheel keygen' (instead of generating
+  a new key at random each time)
+- Python 2/3 encoding/decoding fixes
+- Run tests on Python 2.6 (without signature verification)
+
+**0.9 (2012-08-22)**
+
+- Updated digital signatures scheme
+- Python 3 support for digital signatures
+- Always verify RECORD hashes on extract
+- "wheel" command line tool to sign, verify, unpack wheel files
+
+**0.8 (2012-08-17)**
+
+- none/any draft pep tags update
+- improved wininst2wheel script
+- doc changes and other improvements
+
+**0.7 (2012-07-28)**
+
+- sort .dist-info at end of wheel archive
+- Windows & Python 3 fixes from Paul Moore
+- pep8
+- scripts to convert wininst & egg to wheel
+
+**0.6 (2012-07-23)**
+
+- require distribute >= 0.6.28
+- stop using verlib
+
+**0.5 (2012-07-17)**
+
+- working pretty well
+
+**0.4.2 (2012-07-12)**
+
+- hyphenated name fix
+
+**0.4 (2012-07-11)**
+
+- improve test coverage
+- improve Windows compatibility
+- include tox.ini courtesy of Marc Abramowitz
+- draft hmac sha-256 signing function
+
+**0.3 (2012-07-04)**
+
+- prototype egg2wheel conversion script
+
+**0.2 (2012-07-03)**
+
+- Python 3 compatibility
+
+**0.1 (2012-06-30)**
+
+- Initial version
diff --git a/docs/quickstart.rst b/docs/quickstart.rst
new file mode 100644 (file)
index 0000000..13c16a3
--- /dev/null
@@ -0,0 +1,24 @@
+Quickstart
+==========
+
+To build a wheel for your project::
+
+    python -m pip install build
+    python -m build --wheel
+
+The wheel will go to ``dist/yourproject-<tags>.whl``.
+
+If you want to make universal (Python 2/3 compatible, pure Python) wheels, add the following
+section to your ``setup.cfg``::
+
+    [bdist_wheel]
+    universal = 1
+
+To convert an ``.egg`` file to a wheel::
+
+    wheel convert youreggfile.egg
+
+Similarly, to convert a Windows installer (made using ``python setup.py bdist_wininst``) to a
+wheel::
+
+    wheel convert yourinstaller.exe
diff --git a/docs/reference/index.rst b/docs/reference/index.rst
new file mode 100644 (file)
index 0000000..f332026
--- /dev/null
@@ -0,0 +1,10 @@
+Reference Guide
+===============
+
+.. toctree::
+   :maxdepth: 2
+
+   wheel_convert
+   wheel_unpack
+   wheel_pack
+   wheel_tags
diff --git a/docs/reference/wheel_convert.rst b/docs/reference/wheel_convert.rst
new file mode 100644 (file)
index 0000000..ca625b5
--- /dev/null
@@ -0,0 +1,46 @@
+wheel convert
+=============
+
+Usage
+-----
+
+::
+
+    wheel convert [options] <egg_file_or_directory> [egg_file_or_directory...]
+
+
+Description
+-----------
+
+Convert one or more eggs (``.egg``; made with ``bdist_egg``) or Windows
+installers (``.exe``; made with ``bdist_wininst``) into wheels.
+
+Egg names must match the standard format:
+
+* ``<project>-<version>-pyX.Y`` for pure Python wheels
+* ``<project>-<version>-pyX.Y-<arch>`` for binary wheels
+
+
+Options
+-------
+
+.. option:: -d, --dest-dir <dir>
+
+    Directory to store the generated wheels in (defaults to current directory).
+
+
+Examples
+--------
+
+* Convert a single egg file::
+
+    $ wheel convert foobar-1.2.3-py2.7.egg
+    $ ls *.whl
+    foobar-1.2.3-py27-none.whl
+
+* If the egg file name is invalid::
+
+    $ wheel convert pycharm-debug.egg
+    "pycharm-debug.egg" is not a valid egg name (must match at least name-version-pyX.Y.egg)
+    $ echo $?
+    1
diff --git a/docs/reference/wheel_pack.rst b/docs/reference/wheel_pack.rst
new file mode 100644 (file)
index 0000000..0e375a8
--- /dev/null
@@ -0,0 +1,42 @@
+wheel pack
+==========
+
+Usage
+-----
+
+::
+
+    wheel pack <wheel_directory>
+
+
+Description
+-----------
+
+Repack a previously unpacked wheel file.
+
+This command can be used to repack a wheel file after its contents have been modified.
+This is the equivalent of ``zip -r <wheel_file> <wheel_directory>`` except that it regenerates the
+``RECORD`` file which contains hashes of all included files.
+
+
+Options
+-------
+
+.. option:: -d, --dest-dir <dir>
+
+    Directory to put the new wheel file into.
+
+.. option:: --build-number <tag>
+
+    Override the build tag in the new wheel file name
+
+Examples
+--------
+
+* Unpack a wheel, add a dummy module and then repack it (with a new build number)::
+
+    $ wheel unpack someproject-1.5.0-py2-py3-none.whl
+    Unpacking to: ./someproject-1.5.0
+    $ touch someproject-1.5.0/somepackage/module.py
+    $ wheel pack --build-number 2 someproject-1.5.0
+    Repacking wheel as ./someproject-1.5.0-2-py2-py3-none.whl...OK
diff --git a/docs/reference/wheel_tags.rst b/docs/reference/wheel_tags.rst
new file mode 100644 (file)
index 0000000..f437071
--- /dev/null
@@ -0,0 +1,62 @@
+wheel tags
+==========
+
+Usage
+-----
+
+::
+
+    wheel tags [-h] [--remove] [--python-tag TAG] [--abi-tag TAG] [--platform-tag TAG] [--build NUMBER] WHEEL [...]
+
+Description
+-----------
+
+Make a new wheel with given tags from an existing wheel. Any tags left
+unspecified will remain the same. Multiple tags are separated by a ".". Starting
+with a "+" will append to the existing tags.  Starting with a "-" will remove a
+tag. Be sure to use the equals syntax on the shell so that it does not get
+parsed as an extra option, such as ``--python-tag=-py2``. The original file
+will remain unless ``--remove`` is given. The output filename(s) will be
+displayed on stdout for further processing.
+
+
+Options
+-------
+
+.. option:: --remove
+
+    Remove the original wheel, keeping only the retagged wheel.
+
+.. option:: --python-tag=TAG
+
+    Override the python tag (prepend with "+" to append, "-" to remove).
+    Multiple tags can be separated with a dot.
+
+.. option:: --abi-tag=TAG
+
+    Override the abi tag (prepend with "+" to append, "-" to remove).
+    Multiple tags can be separated with a dot.
+
+.. option:: --platform-tag=TAG
+
+    Override the platform tag (prepend with "+" to append, "-" to remove).
+    Multiple tags can be separated with a dot.
+
+.. option:: --build=NUMBER
+
+    Specify a build number.
+
+Examples
+--------
+
+* Replace a wheel's Python specific tags with generic tags (if no Python extensions are present, for example)::
+
+    $ wheel tags --python-tag=py2.py3 --abi-tag=none cmake-3.20.2-cp39-cp39-win_amd64.whl
+    cmake-3.20.2-py2.py3-none-win_amd64.whl
+
+* Add compatibility tags for macOS universal wheels and older pips::
+
+    $ wheel tags \
+        --platform-tag=+macosx_10_9_x86_64.macosx_11_0_arm64 \
+        ninja-1.11.1-py2.py3-none-macosx_10_9_universal2.whl
+    ninja-1.11.1-py2.py3-none-macosx_10_9_universal2.macosx_10_9_x86_64.macosx_11_0_arm64.whl
diff --git a/docs/reference/wheel_unpack.rst b/docs/reference/wheel_unpack.rst
new file mode 100644 (file)
index 0000000..0b1a034
--- /dev/null
@@ -0,0 +1,46 @@
+wheel unpack
+============
+
+Usage
+-----
+
+::
+
+    wheel unpack <wheel_file>
+
+
+Description
+-----------
+
+Unpack the given wheel file.
+
+This is the equivalent of ``unzip <wheel_file>``, except that it also checks
+that the hashes and file sizes match with those in ``RECORD`` and exits with an
+error if it encounters a mismatch.
+
+
+Options
+-------
+
+.. option:: -d, --dest <dir>
+
+    Directory to unpack the wheel into.
+
+
+Examples
+--------
+
+* Unpack a wheel::
+
+    $ wheel unpack someproject-1.5.0-py2.py3-none-any.whl
+    Unpacking to: ./someproject-1.5.0
+
+* If a file's hash does not match::
+
+    $ wheel unpack someproject-1.5.0-py2.py3-none-any.whl
+    Unpacking to: ./someproject-1.5.0
+    Traceback (most recent call last):
+    ...
+    wheel.install.BadWheelFile: Bad hash for file 'mypackage/module.py'
+    $ echo $?
+    1
diff --git a/docs/story.rst b/docs/story.rst
new file mode 100644 (file)
index 0000000..101c8cd
--- /dev/null
@@ -0,0 +1,62 @@
+The Story of Wheel
+==================
+
+I was impressed with Tarek’s packaging talk at PyCon 2010, and I
+admire PEP 345 (Metadata for Python Software Packages 1.2) and PEP 376
+(Database of Installed Python Distributions) which standardize a richer
+metadata format and show how distributions should be installed on disk. So
+naturally with all the hubbub about ``packaging`` in Python 3.3, I decided
+to try it to reap the benefits of a more standardized and predictable
+Python packaging experience.
+
+I began by converting ``cryptacular``, a password hashing package which
+has a simple C extension, to use ``setup.cfg``. I downloaded the Python 3.3
+source, struggled with the difference between ``setup.py`` and ``setup.cfg``
+syntax, fixed the ``define_macros`` feature, stopped using the missing
+``extras`` functionality, and several hours later I was able to generate my
+``METADATA`` from ``setup.cfg``. I rejoiced at my newfound freedom from the
+tyranny of arbitrary code execution during the build and install process.
+
+It was a lot of work. The package is worse off than before, and it can’t
+be built or installed without patching the Python source code itself.
+
+It was about that time that distutils-sig had a discussion about the
+need to include a generated ``setup.cfg`` from ``setup.cfg`` because
+``setup.cfg`` wasn’t static enough. Wait, what?
+
+Of course there is a different way to massively simplify the install
+process. It’s called built or binary packages. You never have to run
+``setup.py`` because there is no ``setup.py``. There is only METADATA aka
+PKG-INFO. Installation has two steps: ‘build package’; ‘install
+package’, and you can skip the first step, have someone else do it
+for you, do it on another machine, or install the build system from a
+binary package and let the build system handle the building. The build
+is still complicated, but installation is simple.
+
+With the binary package strategy people who want to install use a simple,
+compatible installer, and people who want to package use whatever is
+convenient for them for as long as it meets their needs. No one has
+to rewrite ``setup.py`` for their own or the 20k+ other packages on PyPI
+unless a different build system does a better job.
+
+Wheel is my attempt to benefit from the excellent distutils-sig work
+without having to fix the intractable ``distutils`` software itself. Like
+``METADATA`` and ``.dist-info`` directories but unlike Extension(), it’s
+simple enough that there really could be alternate implementations; the
+simplest (but less than ideal) installer is nothing more than “unzip
+archive.whl” somewhere on sys.path.
+
+If you’ve made it this far you probably wonder whether I’ve heard
+of eggs. Some comparisons:
+
+* Wheel is an installation format; egg is importable. Wheel archives do not need to include .pyc and are less tied to a specific Python version or implementation. Wheel can install (pure Python) packages built with previous versions of Python so you don’t always have to wait for the packager to catch up.
+
+* Wheel uses .dist-info directories; egg uses .egg-info. Wheel is compatible with the new world of Python ``packaging`` and the new concepts it brings.
+
+* Wheel has a richer file naming convention for today’s multi-implementation world. A single wheel archive can indicate its compatibility with a number of Python language versions and implementations, ABIs, and system architectures. Historically the ABI has been specific to a CPython release, but when we get a longer-term ABI, wheel will be ready.
+
+* Wheel is lossless. The first wheel implementation ``bdist_wheel`` always generates ``egg-info``, and then converts it to a ``.whl``. Later tools will allow for the conversion of existing eggs and bdist_wininst distributions.
+
+* Wheel is versioned. Every wheel file contains the version of the wheel specification and the implementation that packaged it. Hopefully the next migration can simply be to Wheel 2.0.
+
+I hope you will benefit from wheel.
diff --git a/docs/user_guide.rst b/docs/user_guide.rst
new file mode 100644 (file)
index 0000000..c8b5a34
--- /dev/null
@@ -0,0 +1,86 @@
+User Guide
+==========
+
+Building Wheels
+---------------
+
+To build a wheel for your project::
+
+    python -m pip install build
+    python -m build --wheel
+
+This will build any C extensions in the project and then package those and the
+pure Python code into a ``.whl`` file in the ``dist`` directory.
+
+If your project contains no C extensions and is expected to work on both
+Python 2 and 3, you will want to tell wheel to produce universal wheels by
+adding this to your ``setup.cfg`` file:
+
+.. code-block:: ini
+
+    [bdist_wheel]
+    universal = 1
+
+
+Including license files in the generated wheel file
+---------------------------------------------------
+
+Several open source licenses require the license text to be included in every
+distributable artifact of the project. By default, ``wheel`` conveniently
+includes files matching the following glob_ patterns in the ``.dist-info``
+directory:
+
+* ``AUTHORS*``
+* ``COPYING*``
+* ``LICEN[CS]E*``
+* ``NOTICE*``
+
+This can be overridden by setting the ``license_files`` option in the
+``[metadata]`` section of the project's ``setup.cfg``. For example:
+
+.. code-block:: cfg
+
+   [metadata]
+   license_files =
+      license.txt
+      3rdparty/*.txt
+
+No matter the path, all the matching license files are written in the wheel in
+the ``.dist-info`` directory based on their file name only.
+
+By specifying an empty ``license_files`` option, you can disable this
+functionality entirely.
+
+.. note:: There used to be an option called ``license_file`` (singular).
+    As of wheel v0.32, this option has been deprecated in favor of the more
+    versatile ``license_files`` option.
+
+.. _glob: https://docs.python.org/library/glob.html
+
+Converting Eggs to Wheels
+-------------------------
+
+The wheel tool is capable of converting eggs to the wheel format.
+It works on both ``.egg`` files and ``.egg`` directories, and you can convert
+multiple eggs with a single command::
+
+    wheel convert blah-1.2.3-py2.7.egg foo-2.0b1-py3.5.egg
+
+The command supports wildcard expansion as well (via :func:`~glob.iglob`) to
+accommodate shells that do not do such expansion natively::
+
+    wheel convert *.egg
+
+By default, the resulting wheels are written to the current working directory.
+This can be changed with the ``--dest-dir`` option::
+
+    wheel convert --dest-dir /tmp blah-1.2.3-py2.7.egg
+
+Installing Wheels
+-----------------
+
+To install a wheel file, use pip_::
+
+    $ pip install someproject-1.5.0-py2.py3-none-any.whl
+
+.. _pip: https://pypi.org/project/pip/
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644 (file)
index 0000000..f33ab16
--- /dev/null
@@ -0,0 +1,140 @@
+[build-system]
+requires = ["flit_core >=3.8,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "wheel"
+description = "A built-package format for Python"
+readme = "README.rst"
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+    "Intended Audience :: Developers",
+    "Topic :: System :: Archiving :: Packaging",
+    "License :: OSI Approved :: MIT License",
+    "Programming Language :: Python",
+    "Programming Language :: Python :: 3 :: Only",
+    "Programming Language :: Python :: 3.7",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+]
+authors = [{name = "Daniel Holth", email = "dholth@fastmail.fm"}]
+maintainers = [{name = "Alex Grönholm", email = "alex.gronholm@nextday.fi"}]
+keywords = ["wheel", "packaging"]
+requires-python = ">=3.7"
+dynamic = ["version"]
+
+[project.urls]
+Documentation = "https://wheel.readthedocs.io/"
+Changelog = "https://wheel.readthedocs.io/en/stable/news.html"
+"Issue Tracker" = "https://github.com/pypa/wheel/issues"
+Source = "https://github.com/pypa/wheel"
+
+[project.scripts]
+wheel = "wheel.cli:main"
+
+[project.entry-points."distutils.commands"]
+bdist_wheel = "wheel.bdist_wheel:bdist_wheel"
+
+[project.optional-dependencies]
+test = [
+    "pytest >= 6.0.0",
+    "setuptools >= 65",
+]
+
+[tool.flit.sdist]
+include = [
+    "LICENSE*",
+    "docs/**/*.py",
+    "docs/**/*.rst",
+    "docs/Makefile",
+    "docs/make.bat",
+    "manpages/*.rst",
+    "tests/**/*.py",
+    "tests/**/*.txt",
+    "tests/**/*.c",
+    "tests/**/*.h",
+    "tests/**/*.cfg",
+    "tests/testdata/macosx_minimal_system_version/*.dylib",
+    "tests/testdata/test-1.0-py2.py3-none-any.whl",
+]
+exclude = [
+    ".cirrus.yml",
+    ".github/**",
+    ".gitignore",
+    ".pre-commit-config.yaml",
+    ".readthedocs.yml",
+    "**/__pycache__",
+]
+
+[tool.black]
+extend-exclude = '''
+^/src/wheel/vendored/
+'''
+
+[tool.pytest.ini_options]
+minversion = "6.0"
+addopts = ["-ra", "--showlocals", "--strict-markers", "--strict-config"]
+xfail_strict = true
+filterwarnings = [
+    "error",
+    "ignore::Warning:_pytest.*",
+]
+log_cli_level = "info"
+testpaths = ["tests"]
+
+[tool.coverage.run]
+source = ["wheel"]
+omit = ["*/vendored/*"]
+
+[tool.coverage.report]
+show_missing = true
+
+[tool.ruff]
+line-length = 88
+select = [
+    "E", "F", "W",  # default flake-8
+    "I",            # isort
+    "PGH",          # pygrep-hooks
+    "UP",           # pyupgrade
+    "B",            # flake8-bugbear
+]
+src = ["src"]
+
+# Tox (https://tox.wiki/) is a tool for running tests in multiple virtualenvs.
+# This configuration file will run the test suite on all supported python
+# versions. To use it, "pipx install tox" and then run "tox" from this
+# directory.
+
+[tool.tox]
+legacy_tox_ini = '''
+[tox]
+envlist = py37, py38, py39, py310, py311, py312, py313, pypy3, lint, pkg
+minversion = 4.0.0
+skip_missing_interpreters = true
+
+[testenv]
+package = wheel
+wheel_build_env = .pkg
+depends = lint
+commands = {env_python} -b -m pytest {posargs}
+extras = test
+set_env =
+  PYTHONWARNDEFAULTENCODING = 1
+
+[testenv:lint]
+depends =
+basepython = python3
+deps = pre-commit
+commands = pre-commit run --all-files --show-diff-on-failure
+skip_install = true
+
+[testenv:pkg]
+basepython = python3
+deps =
+    build
+    flit>=3.8
+commands = {envpython} -b -m pytest tests/test_sdist.py {posargs}
+'''
diff --git a/setup.py b/setup.py
new file mode 100644 (file)
index 0000000..d6f0b18
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,34 @@
#!/usr/bin/env python
# setup.py generated by flit for tools that don't yet use PEP 517
# NOTE(review): auto-generated fallback; the authoritative metadata lives in
# pyproject.toml — edit that file, not this one.

from distutils.core import setup

# Importable packages, all rooted under src/ (see package_dir below).
packages = \
['wheel', 'wheel.cli', 'wheel.vendored', 'wheel.vendored.packaging']

# Include every data file found alongside the package modules.
package_data = \
{'': ['*']}

package_dir = \
{'': 'src'}

# Optional dependency groups, e.g. "pip install wheel[test]".
extras_require = \
{'test': ['pytest >= 6.0.0', 'setuptools >= 65']}

# Console script plus the distutils command hook that makes
# "setup.py bdist_wheel" work.
entry_points = \
{'console_scripts': ['wheel = wheel.cli:main'],
 'distutils.commands': ['bdist_wheel = wheel.bdist_wheel:bdist_wheel']}

setup(name='wheel',
      version='0.42.0',
      description='A built-package format for Python',
      author=None,
      author_email='Daniel Holth <dholth@fastmail.fm>',
      url=None,
      packages=packages,
      package_data=package_data,
      package_dir=package_dir,
      extras_require=extras_require,
      entry_points=entry_points,
      python_requires='>=3.7',
     )
diff --git a/src/wheel/__init__.py b/src/wheel/__init__.py
new file mode 100644 (file)
index 0000000..6cfc477
--- /dev/null
@@ -0,0 +1,3 @@
"""Top-level package for wheel; exposes the package version string."""
from __future__ import annotations

# Single source of truth for the version; read by flit and by bdist_wheel's
# Generator header.
__version__ = "0.42.0"
diff --git a/src/wheel/__main__.py b/src/wheel/__main__.py
new file mode 100644 (file)
index 0000000..0be7453
--- /dev/null
@@ -0,0 +1,23 @@
+"""
+Wheel command line tool (enable python -m wheel syntax)
+"""
+
+from __future__ import annotations
+
+import sys
+
+
def main():  # needed for console script
    """Entry point for ``python -m wheel``; delegates to ``wheel.cli.main``."""
    if __package__ == "":
        # Running straight out of a wheel archive (e.g.
        # 'python wheel-0.9.whl/wheel'): make the archive root importable
        # so 'import wheel.cli' below resolves.
        import os.path

        archive_root = os.path.dirname(os.path.dirname(__file__))
        sys.path.insert(0, archive_root)

    import wheel.cli

    sys.exit(wheel.cli.main())


if __name__ == "__main__":
    sys.exit(main())
diff --git a/src/wheel/_setuptools_logging.py b/src/wheel/_setuptools_logging.py
new file mode 100644 (file)
index 0000000..006c098
--- /dev/null
@@ -0,0 +1,26 @@
+# copied from setuptools.logging, omitting monkeypatching
+from __future__ import annotations
+
+import logging
+import sys
+
+
+def _not_warning(record):
+    return record.levelno < logging.WARNING
+
+
def configure():
    """
    Configure logging to emit warning and above to stderr
    and everything else to stdout. This behavior is provided
    for compatibility with distutils.log but may change in
    the future.
    """
    stderr_handler = logging.StreamHandler()
    stderr_handler.setLevel(logging.WARNING)
    # Everything below WARNING goes to stdout instead.
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.addFilter(_not_warning)
    logging.basicConfig(
        format="{message}",
        style="{",
        handlers=(stderr_handler, stdout_handler),
        level=logging.DEBUG,
    )
diff --git a/src/wheel/bdist_wheel.py b/src/wheel/bdist_wheel.py
new file mode 100644 (file)
index 0000000..cc3e259
--- /dev/null
@@ -0,0 +1,595 @@
+"""
+Create a wheel (.whl) distribution.
+
+A wheel is a built archive format.
+"""
+
+from __future__ import annotations
+
+import os
+import re
+import shutil
+import stat
+import struct
+import sys
+import sysconfig
+import warnings
+from email.generator import BytesGenerator, Generator
+from email.policy import EmailPolicy
+from glob import iglob
+from shutil import rmtree
+from zipfile import ZIP_DEFLATED, ZIP_STORED
+
+import setuptools
+from setuptools import Command
+
+from . import __version__ as wheel_version
+from .macosx_libfile import calculate_macosx_platform_tag
+from .metadata import pkginfo_to_metadata
+from .util import log
+from .vendored.packaging import tags
+from .vendored.packaging import version as _packaging_version
+from .wheelfile import WheelFile
+
+
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name.

    Any run of characters outside ``[A-Za-z0-9.]`` collapses to a single '-'.
    """
    return "-".join(re.split(r"[^A-Za-z0-9.]+", name))
+
+
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # Prefer the canonical PEP 440 normalization.
        parsed = _packaging_version.Version(version)
    except _packaging_version.InvalidVersion:
        # Non-PEP 440 version: fall back to a lossy textual cleanup.
        cleaned = version.replace(" ", ".")
        return re.sub("[^A-Za-z0-9.]+", "-", cleaned)
    return str(parsed)
+
+
# Major version of the installed setuptools; used to decide how much of the
# license-file handling setuptools already performs (see license_paths).
setuptools_major_version = int(setuptools.__version__.split(".")[0])

# Acceptable values for the --py-limited-api option (e.g. "cp32", "cp39").
PY_LIMITED_API_PATTERN = r"cp3\d"
+
+
+def _is_32bit_interpreter():
+    return struct.calcsize("P") == 4
+
+
def python_tag():
    """Return the generic interpreter tag for this major version, e.g. 'py3'."""
    major = sys.version_info[0]
    return "py%d" % major
+
+
def get_platform(archive_root):
    """Return our platform name 'win32', 'linux_x86_64'"""
    plat = sysconfig.get_platform()
    if plat.startswith("macosx") and archive_root is not None:
        # Derive the minimum macOS version from the binaries in the archive.
        plat = calculate_macosx_platform_tag(archive_root, plat)
    elif _is_32bit_interpreter():
        # A 32-bit interpreter on a 64-bit kernel reports the kernel's
        # architecture; correct it to the interpreter's own.
        if plat == "linux-x86_64":
            # pip pull request #3497
            plat = "linux-i686"
        elif plat == "linux-aarch64":
            # packaging pull request #234
            # TODO armv8l, packaging pull request #690 => this did not land
            # in pip/packaging yet
            plat = "linux-armv7l"

    return plat.replace("-", "_")
+
+
def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback value for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = sysconfig.get_config_var(var)
    if val is not None:
        return val == expected
    if warn:
        warnings.warn(
            f"Config variable '{var}' is unset, Python ABI tag may " "be incorrect",
            RuntimeWarning,
            stacklevel=2,
        )
    return fallback
+
+
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2)."""
    soabi = sysconfig.get_config_var("SOABI")
    impl = tags.interpreter_name()
    if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
        # No SOABI available: reconstruct the tag from build flags.
        # d = debug build, m = pymalloc (only meaningful before 3.8).
        d = ""
        m = ""
        u = ""
        if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
            d = "d"

        if get_flag(
            "WITH_PYMALLOC",
            impl == "cp",
            warn=(impl == "cp" and sys.version_info < (3, 8)),
        ) and sys.version_info < (3, 8):
            m = "m"

        abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}"
    elif soabi and impl == "cp" and soabi.startswith("cpython"):
        # non-Windows
        abi = "cp" + soabi.split("-")[1]
    elif soabi and impl == "cp" and soabi.startswith("cp"):
        # Windows
        abi = soabi.split("-")[0]
    elif soabi and impl == "pp":
        # we want something like pypy36-pp73
        abi = "-".join(soabi.split("-")[:2])
        abi = abi.replace(".", "_").replace("-", "_")
    elif soabi and impl == "graalpy":
        # GraalPy: keep the first three SOABI components.
        abi = "-".join(soabi.split("-")[:3])
        abi = abi.replace(".", "_").replace("-", "_")
    elif soabi:
        # Unknown implementation: sanitize the whole SOABI string.
        abi = soabi.replace(".", "_").replace("-", "_")
    else:
        # Undeterminable ABI; get_tag() renders None as "none".
        abi = None

    return abi
+
+
def safer_name(name):
    """Distribution name sanitized for use in a wheel filename."""
    sanitized = safe_name(name)
    return sanitized.replace("-", "_")
+
+
def safer_version(version):
    """Version string sanitized for use in a wheel filename."""
    sanitized = safe_version(version)
    return sanitized.replace("-", "_")
+
+
def remove_readonly(func, path, excinfo):
    """``shutil.rmtree`` *onerror* callback: delegate to the *onexc* variant."""
    # excinfo is the (type, value, traceback) triple from sys.exc_info().
    exc_value = excinfo[1]
    remove_readonly_exc(func, path, exc_value)
+
+
def remove_readonly_exc(func, path, exc):
    """``shutil.rmtree`` *onexc* callback: clear the read-only bit and retry."""
    writable = stat.S_IWRITE
    os.chmod(path, writable)
    func(path)
+
+
class bdist_wheel(Command):
    """setuptools/distutils command that creates a .whl archive."""

    description = "create a wheel distribution"

    # Maps the textual --compression option value to the zipfile constant
    # used when writing the archive.
    supported_compressions = {
        "stored": ZIP_STORED,
        "deflated": ZIP_DEFLATED,
    }

    # (long name, short name, help text) triples consumed by distutils'
    # option parser; a name ending in '=' takes an argument.
    user_options = [
        ("bdist-dir=", "b", "temporary directory for creating the distribution"),
        (
            "plat-name=",
            "p",
            "platform name to embed in generated filenames "
            "(default: %s)" % get_platform(None),
        ),
        (
            "keep-temp",
            "k",
            "keep the pseudo-installation tree around after "
            "creating the distribution archive",
        ),
        ("dist-dir=", "d", "directory to put final built distributions in"),
        ("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
        (
            "relative",
            None,
            "build the archive using relative paths " "(default: false)",
        ),
        (
            "owner=",
            "u",
            "Owner name used when creating a tar file" " [default: current user]",
        ),
        (
            "group=",
            "g",
            "Group name used when creating a tar file" " [default: current group]",
        ),
        ("universal", None, "make a universal wheel" " (default: false)"),
        (
            "compression=",
            None,
            "zipfile compression (one of: {})" " (default: 'deflated')".format(
                ", ".join(supported_compressions)
            ),
        ),
        (
            "python-tag=",
            None,
            "Python implementation compatibility tag"
            " (default: '%s')" % (python_tag()),
        ),
        (
            "build-number=",
            None,
            "Build number for this particular version. "
            "As specified in PEP-0427, this must start with a digit. "
            "[default: None]",
        ),
        (
            "py-limited-api=",
            None,
            "Python tag (cp32|cp33|cpNN) for abi3 wheel tag" " (default: false)",
        ),
    ]

    # Options that are on/off flags rather than taking a value.
    boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
+
    def initialize_options(self):
        """Set the initial (pre-finalization) values of all command options."""
        self.bdist_dir = None
        self.data_dir = None
        self.plat_name = None
        self.plat_tag = None
        self.format = "zip"
        self.keep_temp = False
        self.dist_dir = None
        self.egginfo_dir = None
        self.root_is_pure = None
        self.skip_build = None
        self.relative = False
        self.owner = None
        self.group = None
        self.universal = False
        self.compression = "deflated"
        self.python_tag = python_tag()
        self.build_number = None
        self.py_limited_api = False
        # Whether --plat-name was given explicitly; set in finalize_options().
        self.plat_name_supplied = False
+
    def finalize_options(self):
        """Resolve option values, filling in defaults from related commands."""
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command("bdist").bdist_base
            self.bdist_dir = os.path.join(bdist_base, "wheel")

        egg_info = self.distribution.get_command_obj("egg_info")
        egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`

        self.data_dir = self.wheel_dist_name + ".data"
        self.plat_name_supplied = self.plat_name is not None

        # Translate the textual --compression value into a zipfile constant.
        try:
            self.compression = self.supported_compressions[self.compression]
        except KeyError:
            raise ValueError(f"Unsupported compression: {self.compression}") from None

        need_options = ("dist_dir", "plat_name", "skip_build")

        # Inherit any still-unset options from the parent "bdist" command.
        self.set_undefined_options("bdist", *zip(need_options, need_options))

        # Pure = no compiled extensions or C libraries in this distribution.
        self.root_is_pure = not (
            self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
        )

        if self.py_limited_api and not re.match(
            PY_LIMITED_API_PATTERN, self.py_limited_api
        ):
            raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)

        # Support legacy [wheel] section for setting universal
        wheel = self.distribution.get_option_dict("wheel")
        if "universal" in wheel:
            # please don't define this in your global configs
            log.warning(
                "The [wheel] section is deprecated. Use [bdist_wheel] instead.",
            )
            val = wheel["universal"][1].strip()
            if val.lower() in ("1", "true", "yes"):
                self.universal = True

        # PEP 427: the build tag must start with a digit so it sorts numerically.
        if self.build_number is not None and not self.build_number[:1].isdigit():
            raise ValueError("Build tag (build-number) must start with a digit.")
+
    @property
    def wheel_dist_name(self):
        """Return distribution full name with - replaced with _"""
        # name-version[-buildnumber], each component sanitized so that '-'
        # only separates the filename fields (PEP 427).
        components = (
            safer_name(self.distribution.get_name()),
            safer_version(self.distribution.get_version()),
        )
        if self.build_number:
            components += (self.build_number,)
        return "-".join(components)
+
    def get_tag(self):
        """Compute the (implementation, abi, platform) tag triple for the wheel.

        For non-pure wheels, asserts that the chosen tag is installable on the
        current interpreter.
        """
        # bdist sets self.plat_name if unset, we should only use it for purepy
        # wheels if the user supplied it.
        if self.plat_name_supplied:
            plat_name = self.plat_name
        elif self.root_is_pure:
            plat_name = "any"
        else:
            # macosx contains system version in platform name so need special handle
            if self.plat_name and not self.plat_name.startswith("macosx"):
                plat_name = self.plat_name
            else:
                # on macosx always limit the platform name to comply with any
                # c-extension modules in bdist_dir, since the user can specify
                # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake

                # on other platforms, and on macosx if there are no c-extension
                # modules, use the default platform name.
                plat_name = get_platform(self.bdist_dir)

            if _is_32bit_interpreter():
                if plat_name in ("linux-x86_64", "linux_x86_64"):
                    plat_name = "linux_i686"
                if plat_name in ("linux-aarch64", "linux_aarch64"):
                    # TODO armv8l, packaging pull request #690 => this did not land
                    # in pip/packaging yet
                    plat_name = "linux_armv7l"

        # Normalize to the PEP 427 tag alphabet.
        plat_name = (
            plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_")
        )

        if self.root_is_pure:
            if self.universal:
                impl = "py2.py3"
            else:
                impl = self.python_tag
            tag = (impl, "none", plat_name)
        else:
            impl_name = tags.interpreter_name()
            impl_ver = tags.interpreter_version()
            impl = impl_name + impl_ver
            # We don't work on CPython 3.1, 3.0.
            if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
                impl = self.py_limited_api
                abi_tag = "abi3"
            else:
                abi_tag = str(get_abi_tag()).lower()
            tag = (impl, abi_tag, plat_name)
            # issue gh-374: allow overriding plat_name
            supported_tags = [
                (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
            ]
            assert (
                tag in supported_tags
            ), f"would build wheel with unsupported tag {tag}"
        return tag
+
    def run(self):
        """Build the project, stage it under bdist_dir, and zip it into a .whl."""
        build_scripts = self.reinitialize_command("build_scripts")
        build_scripts.executable = "python"
        build_scripts.force = True

        build_ext = self.reinitialize_command("build_ext")
        build_ext.inplace = False

        if not self.skip_build:
            self.run_command("build")

        # Perform a throwaway "install" into bdist_dir to lay out the files.
        install = self.reinitialize_command("install", reinit_subcommands=True)
        install.root = self.bdist_dir
        install.compile = False
        install.skip_build = self.skip_build
        install.warn_dir = False

        # A wheel without setuptools scripts is more cross-platform.
        # Use the (undocumented) `no_ep` option to setuptools'
        # install_scripts command to avoid creating entry point scripts.
        install_scripts = self.reinitialize_command("install_scripts")
        install_scripts.no_ep = True

        # Use a custom scheme for the archive, because we have to decide
        # at installation time which scheme to use.
        for key in ("headers", "scripts", "data", "purelib", "platlib"):
            setattr(install, "install_" + key, os.path.join(self.data_dir, key))

        basedir_observed = ""

        if os.name == "nt":
            # win32 barfs if any of these are ''; could be '.'?
            # (distutils.command.install:change_roots bug)
            basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
            self.install_libbase = self.install_lib = basedir_observed

        setattr(
            install,
            "install_purelib" if self.root_is_pure else "install_platlib",
            basedir_observed,
        )

        log.info(f"installing to {self.bdist_dir}")

        self.run_command("install")

        # Final archive name: name-version[-build]-impl-abi-plat.whl
        impl_tag, abi_tag, plat_tag = self.get_tag()
        archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            archive_root = os.path.join(
                self.bdist_dir, self._ensure_relative(install.install_base)
            )

        # Convert the generated .egg-info into a PEP 427 .dist-info directory.
        self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
        distinfo_dirname = "{}-{}.dist-info".format(
            safer_name(self.distribution.get_name()),
            safer_version(self.distribution.get_version()),
        )
        distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
        self.egg2dist(self.egginfo_dir, distinfo_dir)

        self.write_wheelfile(distinfo_dir)

        # Make the archive
        if not os.path.exists(self.dist_dir):
            os.makedirs(self.dist_dir)

        wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
        with WheelFile(wheel_path, "w", self.compression) as wf:
            wf.write_files(archive_root)

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, "dist_files", []).append(
            (
                "bdist_wheel",
                "{}.{}".format(*sys.version_info[:2]),  # like 3.7
                wheel_path,
            )
        )

        if not self.keep_temp:
            log.info(f"removing {self.bdist_dir}")
            if not self.dry_run:
                # Python 3.12 replaced rmtree's onerror hook with onexc,
                # which receives the exception instead of an excinfo triple.
                if sys.version_info < (3, 12):
                    rmtree(self.bdist_dir, onerror=remove_readonly)
                else:
                    rmtree(self.bdist_dir, onexc=remove_readonly_exc)
+
+    def write_wheelfile(
+        self, wheelfile_base, generator="bdist_wheel (" + wheel_version + ")"
+    ):
+        from email.message import Message
+
+        msg = Message()
+        msg["Wheel-Version"] = "1.0"  # of the spec
+        msg["Generator"] = generator
+        msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
+        if self.build_number is not None:
+            msg["Build"] = self.build_number
+
+        # Doesn't work for bdist_wininst
+        impl_tag, abi_tag, plat_tag = self.get_tag()
+        for impl in impl_tag.split("."):
+            for abi in abi_tag.split("."):
+                for plat in plat_tag.split("."):
+                    msg["Tag"] = "-".join((impl, abi, plat))
+
+        wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
+        log.info(f"creating {wheelfile_path}")
+        with open(wheelfile_path, "wb") as f:
+            BytesGenerator(f, maxheaderlen=0).flatten(msg)
+
+    def _ensure_relative(self, path):
+        # copied from dir_util, deleted
+        drive, path = os.path.splitdrive(path)
+        if path[0:1] == os.sep:
+            path = drive + path[1:]
+        return path
+
    @property
    def license_paths(self):
        """Paths of license files to copy into the .dist-info directory."""
        if setuptools_major_version >= 57:
            # Setuptools has resolved any patterns to actual file names
            return self.distribution.metadata.license_files or ()

        files = set()
        metadata = self.distribution.get_option_dict("metadata")
        if setuptools_major_version >= 42:
            # Setuptools recognizes the license_files option but does not do globbing
            patterns = self.distribution.metadata.license_files
        else:
            # Prior to those, wheel is entirely responsible for handling license files
            if "license_files" in metadata:
                patterns = metadata["license_files"][1].split()
            else:
                patterns = ()

        if "license_file" in metadata:
            warnings.warn(
                'The "license_file" option is deprecated. Use "license_files" instead.',
                DeprecationWarning,
                stacklevel=2,
            )
            files.add(metadata["license_file"][1])

        # Apply the default patterns only when license_files was not set at
        # all; an explicitly empty list disables the defaults.
        if not files and not patterns and not isinstance(patterns, list):
            patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")

        for pattern in patterns:
            for path in iglob(pattern):
                if path.endswith("~"):
                    log.debug(
                        f'ignoring license file "{path}" as it looks like a backup'
                    )
                    continue

                if path not in files and os.path.isfile(path):
                    log.info(
                        f'adding license file "{path}" (matched pattern "{pattern}")'
                    )
                    files.add(path)

        return files
+
    def egg2dist(self, egginfo_path, distinfo_path):
        """Convert an .egg-info directory into a .dist-info directory"""

        def adios(p):
            """Appropriately delete directory, file or link."""
            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
                shutil.rmtree(p)
            elif os.path.exists(p):
                os.unlink(p)

        # Start from a clean slate: remove any stale .dist-info first
        adios(distinfo_path)

        if not os.path.exists(egginfo_path):
            # There is no egg-info. This is probably because the egg-info
            # file/directory is not named matching the distribution name used
            # to name the archive file. Check for this case and report
            # accordingly.
            import glob

            pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
            possible = glob.glob(pat)
            err = f"Egg metadata expected at {egginfo_path} but not found"
            if possible:
                alt = os.path.basename(possible[0])
                err += f" ({alt} found - possible misnamed archive file?)"

            raise ValueError(err)

        if os.path.isfile(egginfo_path):
            # .egg-info is a single file
            pkginfo_path = egginfo_path
            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
            os.mkdir(distinfo_path)
        else:
            # .egg-info is a directory
            pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)

            # ignore common egg metadata that is useless to wheel
            shutil.copytree(
                egginfo_path,
                distinfo_path,
                ignore=lambda x, y: {
                    "PKG-INFO",
                    "requires.txt",
                    "SOURCES.txt",
                    "not-zip-safe",
                },
            )

            # delete dependency_links if it is only whitespace
            # NOTE(review): assumes dependency_links.txt always exists inside
            # an .egg-info directory -- confirm against setuptools output
            dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
            with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
                dependency_links = dependency_links_file.read().strip()
            if not dependency_links:
                adios(dependency_links_path)

        # Serialize METADATA with a UTF-8 payload and no header-line wrapping
        # (max_line_length=0) so long fields are not folded
        pkg_info_path = os.path.join(distinfo_path, "METADATA")
        serialization_policy = EmailPolicy(
            utf8=True,
            mangle_from_=False,
            max_line_length=0,
        )
        with open(pkg_info_path, "w", encoding="utf-8") as out:
            Generator(out, policy=serialization_policy).flatten(pkg_info)

        # Copy the configured license files alongside METADATA
        for license_path in self.license_paths:
            filename = os.path.basename(license_path)
            shutil.copy(license_path, os.path.join(distinfo_path, filename))

        # The original egg metadata is no longer needed
        adios(egginfo_path)
diff --git a/src/wheel/cli/__init__.py b/src/wheel/cli/__init__.py
new file mode 100644 (file)
index 0000000..a38860f
--- /dev/null
@@ -0,0 +1,155 @@
+"""
+Wheel command-line utility.
+"""
+
+from __future__ import annotations
+
+import argparse
+import os
+import sys
+from argparse import ArgumentTypeError
+
+
class WheelError(Exception):
    """Base class for all errors raised by the wheel command-line tools."""

    pass
+
+
def unpack_f(args):
    """CLI handler for "unpack": delegate to the unpack implementation."""
    from .unpack import unpack as do_unpack

    do_unpack(args.wheelfile, args.dest)
+
+
def pack_f(args):
    """CLI handler for "pack": delegate to the pack implementation."""
    from .pack import pack as do_pack

    do_pack(args.directory, args.dest_dir, args.build_number)
+
+
def convert_f(args):
    """CLI handler for "convert": delegate to the convert implementation."""
    from .convert import convert as do_convert

    do_convert(args.files, args.dest_dir, args.verbose)
+
+
def tags_f(args):
    """CLI handler for "tags": retag each wheel and print the new names."""
    from .tags import tags

    for wheel in args.wheel:
        new_name = tags(
            wheel,
            args.python_tag,
            args.abi_tag,
            args.platform_tag,
            args.build,
            args.remove,
        )
        print(new_name)
+
+
def version_f(args):
    """CLI handler for "version": print the installed wheel version."""
    from .. import __version__

    print("wheel %s" % __version__)
+
+
def parse_build_tag(build_tag: str) -> str:
    """Validate a build tag for argparse and return it unchanged.

    An empty string is accepted; otherwise the tag must start with a digit
    and must not contain a dash.

    :raises ArgumentTypeError: if the tag is malformed
    """
    if build_tag:
        if not build_tag[0].isdigit():
            raise ArgumentTypeError("build tag must begin with a digit")
        if "-" in build_tag:
            raise ArgumentTypeError("invalid character ('-') in build tag")

    return build_tag
+
+
# Description text for the "tags" subcommand (passed to argparse and shown in
# --help output).
TAGS_HELP = """\
Make a new wheel with given tags. Any tags unspecified will remain the same.
Starting the tags with a "+" will append to the existing tags. Starting with a
"-" will remove a tag (use --option=-TAG syntax). Multiple tags can be
separated by ".". The original file will remain unless --remove is given.  The
output filename(s) will be displayed on stdout for further processing.
"""
+
+
def parser():
    """Build and return the top-level argument parser for the wheel CLI."""
    cli = argparse.ArgumentParser()
    commands = cli.add_subparsers(help="commands")

    unpack_cmd = commands.add_parser("unpack", help="Unpack wheel")
    unpack_cmd.add_argument("--dest", "-d", help="Destination directory", default=".")
    unpack_cmd.add_argument("wheelfile", help="Wheel file")
    unpack_cmd.set_defaults(func=unpack_f)

    pack_cmd = commands.add_parser("pack", help="Repack wheel")
    pack_cmd.add_argument("directory", help="Root directory of the unpacked wheel")
    pack_cmd.add_argument(
        "--dest-dir",
        "-d",
        default=os.path.curdir,
        help="Directory to store the wheel (default %(default)s)",
    )
    pack_cmd.add_argument("--build-number", help="Build tag to use in the wheel name")
    pack_cmd.set_defaults(func=pack_f)

    convert_cmd = commands.add_parser("convert", help="Convert egg or wininst to wheel")
    convert_cmd.add_argument("files", nargs="*", help="Files to convert")
    convert_cmd.add_argument(
        "--dest-dir",
        "-d",
        default=os.path.curdir,
        help="Directory to store wheels (default %(default)s)",
    )
    convert_cmd.add_argument("--verbose", "-v", action="store_true")
    convert_cmd.set_defaults(func=convert_f)

    tags_cmd = commands.add_parser(
        "tags", help="Add or replace the tags on a wheel", description=TAGS_HELP
    )
    tags_cmd.add_argument("wheel", nargs="*", help="Existing wheel(s) to retag")
    tags_cmd.add_argument(
        "--remove",
        action="store_true",
        help="Remove the original files, keeping only the renamed ones",
    )
    tags_cmd.add_argument(
        "--python-tag", metavar="TAG", help="Specify an interpreter tag(s)"
    )
    tags_cmd.add_argument("--abi-tag", metavar="TAG", help="Specify an ABI tag(s)")
    tags_cmd.add_argument(
        "--platform-tag", metavar="TAG", help="Specify a platform tag(s)"
    )
    tags_cmd.add_argument(
        "--build", type=parse_build_tag, metavar="BUILD", help="Specify a build tag"
    )
    tags_cmd.set_defaults(func=tags_f)

    version_cmd = commands.add_parser("version", help="Print version and exit")
    version_cmd.set_defaults(func=version_f)

    help_cmd = commands.add_parser("help", help="Show this help")
    help_cmd.set_defaults(func=lambda args: cli.print_help())

    return cli
+
+
def main():
    """CLI entry point; return 0 on success, 1 on error or missing command."""
    argparser = parser()
    args = argparser.parse_args()
    if hasattr(args, "func"):
        try:
            args.func(args)
            return 0
        except WheelError as exc:
            print(exc, file=sys.stderr)
    else:
        # No subcommand given: show usage (and still exit non-zero below)
        argparser.print_help()

    return 1
diff --git a/src/wheel/cli/convert.py b/src/wheel/cli/convert.py
new file mode 100644 (file)
index 0000000..2915340
--- /dev/null
@@ -0,0 +1,273 @@
+from __future__ import annotations
+
+import os.path
+import re
+import shutil
+import tempfile
+import zipfile
+from glob import iglob
+
+from ..bdist_wheel import bdist_wheel
+from ..wheelfile import WheelFile
+from . import WheelError
+
+try:
+    from setuptools import Distribution
+except ImportError:
+    from distutils.dist import Distribution
+
# Parses bdist_egg archive names of the form
#   <name>-<version>[-py<X.Y>[-<arch>]].egg
# The pyver/arch groups are optional; arch only appears together with pyver.
# NOTE(review): the "." before "egg" is an unescaped regex dot (matches any
# character) -- harmless in practice but worth confirming upstream intent.
egg_info_re = re.compile(
    r"""
    (?P<name>.+?)-(?P<ver>.+?)
    (-(?P<pyver>py\d\.\d+)
     (-(?P<arch>.+?))?
    )?.egg$""",
    re.VERBOSE,
)
+
+
class _bdist_wheel_tag(bdist_wheel):
    """bdist_wheel subclass that lets the caller pin the full wheel tag.

    The default bdist_wheel implementation derives python and abi tags from
    the running interpreter, which is unsuitable when repackaging prebuilt
    binaries; this class allows supplying the tag triple explicitly.
    """

    full_tag_supplied = False
    full_tag = None  # None or a (pytag, soabitag, plattag) triple

    def get_tag(self):
        """Return the supplied tag triple if set, else defer to bdist_wheel."""
        if not (self.full_tag_supplied and self.full_tag is not None):
            return bdist_wheel.get_tag(self)

        return self.full_tag
+
+
def egg2wheel(egg_path: str, dest_dir: str) -> None:
    """Convert a bdist_egg archive (or installed egg directory) into a wheel.

    :param egg_path: path to the .egg file or egg directory
    :param dest_dir: directory in which the resulting .whl is written
    :raises WheelError: if the egg file name cannot be parsed
    """
    filename = os.path.basename(egg_path)
    match = egg_info_re.match(filename)
    if not match:
        # Bug fix: the offending file name was missing from the message
        # (the f-string had lost its placeholder)
        raise WheelError(f"Invalid egg file name: {filename}")

    egg_info = match.groupdict()
    # Renamed from "dir" to avoid shadowing the builtin
    tmp_dir = tempfile.mkdtemp(suffix="_e2w")
    try:
        if os.path.isfile(egg_path):
            # assume we have a bdist_egg otherwise
            with zipfile.ZipFile(egg_path) as egg:
                egg.extractall(tmp_dir)
        else:
            # support buildout-style installed eggs directories
            for pth in os.listdir(egg_path):
                src = os.path.join(egg_path, pth)
                if os.path.isfile(src):
                    shutil.copy2(src, tmp_dir)
                else:
                    shutil.copytree(src, os.path.join(tmp_dir, pth))

        pyver = egg_info["pyver"]
        if pyver:
            pyver = egg_info["pyver"] = pyver.replace(".", "")

        arch = (egg_info["arch"] or "any").replace(".", "_").replace("-", "_")

        # assume all binary eggs are for CPython; pyver is "pyXY" here, so
        # pyver[2:] yields the bare version digits for the abi tag
        abi = "cp" + pyver[2:] if arch != "any" else "none"

        root_is_purelib = egg_info["arch"] is None
        if root_is_purelib:
            bw = bdist_wheel(Distribution())
        else:
            bw = _bdist_wheel_tag(Distribution())

        bw.root_is_pure = root_is_purelib
        bw.python_tag = pyver
        bw.plat_name_supplied = True
        bw.plat_name = egg_info["arch"] or "any"
        if not root_is_purelib:
            bw.full_tag_supplied = True
            bw.full_tag = (pyver, abi, arch)

        dist_info_dir = os.path.join(
            tmp_dir, "{name}-{ver}.dist-info".format(**egg_info)
        )
        bw.egg2dist(os.path.join(tmp_dir, "EGG-INFO"), dist_info_dir)
        bw.write_wheelfile(dist_info_dir, generator="egg2wheel")
        wheel_name = "{name}-{ver}-{pyver}-{}-{}.whl".format(abi, arch, **egg_info)
        with WheelFile(os.path.join(dest_dir, wheel_name), "w") as wf:
            wf.write_files(tmp_dir)
    finally:
        # Robustness fix: clean up the scratch directory even on failure
        shutil.rmtree(tmp_dir)
+
+
def parse_wininst_info(wininfo_name, egginfo_name):
    """Extract metadata from filenames.

    Extracts the 4 metadata items needed (name, version, pyversion, arch)
    from the installer filename and the name of the egg-info directory
    embedded in the zipfile (if any).

    The egginfo filename has the format::

        name-ver(-pyver)(-arch).egg-info

    The installer filename has the format::

        name-ver.arch(-pyver).exe

    Notes on precedence:

    1. The installer filename is not definitive (installers can be renamed),
       so more reliable data is used whenever possible.
    2. Name and version are preferred from the egg-info data, which comes
       straight from the distutils metadata and is mandatory.
    3. The pyver from the egg-info data is ignored: it reflects the Python
       used to *build* the installer. The installer filename is correct here
       (absence of a pyver implies any version).
    4. The architecture must be taken from the installer filename; it is not
       included in the egg-info data.
    5. Architecture-neutral installers still carry an architecture because
       the executable installer format is architecture-specific, so the
       architecture is ignored later when the content is pure Python.
    """
    egg_match = egg_info_re.search(egginfo_name) if egginfo_name else None
    if egginfo_name and not egg_match:
        raise ValueError(f"Egg info filename {egginfo_name} is not valid")

    # Parse the wininst filename
    # 1. Distribution name (up to the first '-')
    name, dash, remainder = wininfo_name.partition("-")
    if not dash:
        raise ValueError(f"Installer filename {wininfo_name} is not valid")

    # Strip '.exe'
    remainder = remainder[:-4]
    # 2. Python version (from the last '-', must start with 'py')
    head, dash, pyver = remainder.rpartition("-")
    if dash and pyver.startswith("py"):
        remainder = head
        pyver = pyver.replace(".", "")
    else:
        # Not version specific - use py2.py3. While it is possible that
        # pure-Python code is not compatible with both Python 2 and 3, there
        # is no way of knowing from the wininst format, so we assume the best
        # here (the user can always manually rename the wheel to be more
        # restrictive if needed).
        pyver = "py2.py3"
    # 3. Version and architecture
    version, dot, arch = remainder.rpartition(".")
    if not dot:
        raise ValueError(f"Installer filename {wininfo_name} is not valid")

    # Prefer the authoritative egg-info name/version when available
    if egg_match:
        name = egg_match.group("name")
        version = egg_match.group("ver")

    return {"name": name, "ver": version, "arch": arch, "pyver": pyver}
+
+
def wininst2wheel(path, dest_dir):
    """Convert a wininst (.exe) installer at *path* into a wheel in *dest_dir*."""
    with zipfile.ZipFile(path) as bdw:
        # Search for egg-info in the archive
        egginfo_name = None
        for filename in bdw.namelist():
            if ".egg-info" in filename:
                egginfo_name = filename
                break

        info = parse_wininst_info(os.path.basename(path), egginfo_name)

        # Any PLATLIB member marks the installer content as platform-specific
        root_is_purelib = True
        for zipinfo in bdw.infolist():
            if zipinfo.filename.startswith("PLATLIB"):
                root_is_purelib = False
                break
        if root_is_purelib:
            paths = {"purelib": ""}
        else:
            paths = {"platlib": ""}

        dist_info = "{name}-{ver}".format(**info)
        datadir = "%s.data/" % dist_info

        # rewrite paths to trick ZipFile into extracting an egg
        # XXX grab wininst .ini - between .exe, padding, and first zip file.
        members = []
        egginfo_name = ""
        for zipinfo in bdw.infolist():
            key, basename = zipinfo.filename.split("/", 1)
            key = key.lower()
            basepath = paths.get(key, None)
            if basepath is None:
                # Unknown top-level dirs (SCRIPTS, DATA, ...) go into .data/
                basepath = datadir + key.lower() + "/"
            oldname = zipinfo.filename
            newname = basepath + basename
            zipinfo.filename = newname
            del bdw.NameToInfo[oldname]
            bdw.NameToInfo[newname] = zipinfo
            # Collect member names, but omit '' (from an entry like "PLATLIB/")
            if newname:
                members.append(newname)
            # Remember egg-info name for the egg2dist call below
            if not egginfo_name:
                if newname.endswith(".egg-info"):
                    egginfo_name = newname
                elif ".egg-info/" in newname:
                    egginfo_name, sep, _ = newname.rpartition("/")
        dir = tempfile.mkdtemp(suffix="_b2w")
        bdw.extractall(dir, members)

    # egg2wheel
    abi = "none"
    pyver = info["pyver"]
    arch = (info["arch"] or "any").replace(".", "_").replace("-", "_")
    # Wininst installers always have arch even if they are not
    # architecture-specific (because the format itself is).
    # So, assume the content is architecture-neutral if root is purelib.
    if root_is_purelib:
        arch = "any"
    # If the installer is architecture-specific, it's almost certainly also
    # CPython-specific.
    if arch != "any":
        pyver = pyver.replace("py", "cp")
    # NOTE(review): unlike egg2wheel, no ".whl" suffix is appended to
    # wheel_name here -- verify WheelFile accepts such a path.
    wheel_name = "-".join((dist_info, pyver, abi, arch))
    if root_is_purelib:
        bw = bdist_wheel(Distribution())
    else:
        bw = _bdist_wheel_tag(Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = info["arch"] or "any"

    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(dir, "%s.dist-info" % dist_info)
    bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator="wininst2wheel")

    wheel_path = os.path.join(dest_dir, wheel_name)
    with WheelFile(wheel_path, "w") as wf:
        wf.write_files(dir)

    shutil.rmtree(dir)
+
+
def convert(files, dest_dir, verbose):
    """Convert egg archives and wininst installers matching *files* to wheels.

    :param files: glob patterns selecting .egg or .exe inputs
    :param dest_dir: directory receiving the generated wheels
    :param verbose: print progress per converted file
    """
    for pattern in files:
        for installer in iglob(pattern):
            # Dispatch on the extension: eggs vs. wininst executables
            is_egg = os.path.splitext(installer)[1] == ".egg"
            converter = egg2wheel if is_egg else wininst2wheel

            if verbose:
                print(f"{installer}... ", flush=True)

            converter(installer, dest_dir)
            if verbose:
                print("OK")
diff --git a/src/wheel/cli/pack.py b/src/wheel/cli/pack.py
new file mode 100644 (file)
index 0000000..e7bb96d
--- /dev/null
@@ -0,0 +1,85 @@
+from __future__ import annotations
+
+import email.policy
+import os.path
+import re
+from email.generator import BytesGenerator
+from email.parser import BytesParser
+
+from wheel.cli import WheelError
+from wheel.wheelfile import WheelFile
+
# Matches an unpacked "<name>-<version>.dist-info" directory name; the version
# must start with a digit, and "namever" captures the combined prefix.
DIST_INFO_RE = re.compile(r"^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))\.dist-info$")
+
+
def pack(directory: str, dest_dir: str, build_number: str | None) -> None:
    """Repack a previously unpacked wheel directory into a new wheel file.

    The .dist-info/WHEEL file must contain one or more tags so that the target
    wheel file name can be determined.

    :param directory: The unpacked wheel directory
    :param dest_dir: Destination directory (defaults to the current directory)
    :param build_number: Optional build tag overriding the one stored in WHEEL
    :raises WheelError: if no (or multiple) .dist-info directories or no tags
        are found
    """
    # Locate the single .dist-info directory
    candidates = [
        entry
        for entry in os.listdir(directory)
        if DIST_INFO_RE.match(entry) and os.path.isdir(os.path.join(directory, entry))
    ]
    if len(candidates) > 1:
        raise WheelError(f"Multiple .dist-info directories found in {directory}")
    elif not candidates:
        raise WheelError(f"No .dist-info directories found in {directory}")

    # Determine the target wheel filename
    dist_info_dir = candidates[0]
    name_version = DIST_INFO_RE.match(dist_info_dir).group("namever")

    # Read the tags and the existing build number from .dist-info/WHEEL
    wheel_file_path = os.path.join(directory, dist_info_dir, "WHEEL")
    with open(wheel_file_path, "rb") as f:
        info = BytesParser(policy=email.policy.compat32).parse(f)
        tags: list[str] = info.get_all("Tag", [])
        existing_build_number = info.get("Build")

        if not tags:
            raise WheelError(
                "No tags present in {}/WHEEL; cannot determine target wheel "
                "filename".format(dist_info_dir)
            )

    # The explicit build number wins; otherwise keep whatever WHEEL had
    if build_number is None:
        build_number = existing_build_number

    # Add/replace/remove the Build header and rewrite WHEEL if it changed
    if build_number is not None:
        del info["Build"]
        if build_number:
            info["Build"] = build_number
            name_version += "-" + build_number

        if build_number != existing_build_number:
            with open(wheel_file_path, "wb") as f:
                BytesGenerator(f, maxheaderlen=0).flatten(info)

    # Reassemble the compressed tag line and repack the wheel
    tagline = compute_tagline(tags)
    wheel_path = os.path.join(dest_dir, f"{name_version}-{tagline}.whl")
    with WheelFile(wheel_path, "w") as wf:
        print(f"Repacking wheel as {wheel_path}...", end="", flush=True)
        wf.write_files(directory)

    print("OK")
+
+
def compute_tagline(tags: list[str]) -> str:
    """Collapse a list of "impl-abi-plat" tags into one compressed tag line.

    :param tags: A list of tags
    :return: A tagline
    """
    split_tags = [tag.split("-") for tag in tags]
    # Deduplicate and sort each position independently, then join with "."
    sections = [
        ".".join(sorted({parts[index] for parts in split_tags})) for index in range(3)
    ]
    return "-".join(sections)
diff --git a/src/wheel/cli/tags.py b/src/wheel/cli/tags.py
new file mode 100644 (file)
index 0000000..88da72e
--- /dev/null
@@ -0,0 +1,139 @@
+from __future__ import annotations
+
+import email.policy
+import itertools
+import os
+from collections.abc import Iterable
+from email.parser import BytesParser
+
+from ..wheelfile import WheelFile
+
+
+def _compute_tags(original_tags: Iterable[str], new_tags: str | None) -> set[str]:
+    """Add or replace tags. Supports dot-separated tags"""
+    if new_tags is None:
+        return set(original_tags)
+
+    if new_tags.startswith("+"):
+        return {*original_tags, *new_tags[1:].split(".")}
+
+    if new_tags.startswith("-"):
+        return set(original_tags) - set(new_tags[1:].split("."))
+
+    return set(new_tags.split("."))
+
+
def tags(
    wheel: str,
    python_tags: str | None = None,
    abi_tags: str | None = None,
    platform_tags: str | None = None,
    build_tag: str | None = None,
    remove: bool = False,
) -> str:
    """Change the tags on a wheel file.

    The tags are left unchanged if they are not specified. To specify "none",
    use ["none"]. To append to the previous tags, a tag should start with a
    "+".  If a tag starts with "-", it will be removed from existing tags.
    Processing is done left to right.

    :param wheel: The paths to the wheels
    :param python_tags: The Python tags to set
    :param abi_tags: The ABI tags to set
    :param platform_tags: The platform tags to set
    :param build_tag: The build tag to set
    :param remove: Remove the original wheel
    :return: The file name of the (possibly rewritten) wheel
    :raises AssertionError: if the filename tags disagree with WHEEL metadata
    """
    with WheelFile(wheel, "r") as f:
        assert f.filename, f"{f.filename} must be available"

        wheel_info = f.read(f.dist_info_path + "/WHEEL")
        info = BytesParser(policy=email.policy.compat32).parsebytes(wheel_info)

        original_wheel_name = os.path.basename(f.filename)
        namever = f.parsed_filename.group("namever")
        build = f.parsed_filename.group("build")
        original_python_tags = f.parsed_filename.group("pyver").split(".")
        original_abi_tags = f.parsed_filename.group("abi").split(".")
        original_plat_tags = f.parsed_filename.group("plat").split(".")

    tags: list[str] = info.get_all("Tag", [])
    existing_build_tag = info.get("Build")

    # Cross-check the filename-derived tags against the WHEEL metadata before
    # making any changes
    impls = {tag.split("-")[0] for tag in tags}
    abivers = {tag.split("-")[1] for tag in tags}
    platforms = {tag.split("-")[2] for tag in tags}

    if impls != set(original_python_tags):
        msg = f"Wheel internal tags {impls!r} != filename tags {original_python_tags!r}"
        raise AssertionError(msg)

    if abivers != set(original_abi_tags):
        msg = f"Wheel internal tags {abivers!r} != filename tags {original_abi_tags!r}"
        raise AssertionError(msg)

    if platforms != set(original_plat_tags):
        msg = (
            f"Wheel internal tags {platforms!r} != filename tags {original_plat_tags!r}"
        )
        raise AssertionError(msg)

    if existing_build_tag != build:
        msg = (
            f"Incorrect filename '{build}' "
            f"& *.dist-info/WHEEL '{existing_build_tag}' build numbers"
        )
        raise AssertionError(msg)

    # Start changing as needed
    if build_tag is not None:
        build = build_tag

    final_python_tags = sorted(_compute_tags(original_python_tags, python_tags))
    final_abi_tags = sorted(_compute_tags(original_abi_tags, abi_tags))
    final_plat_tags = sorted(_compute_tags(original_plat_tags, platform_tags))

    final_tags = [
        namever,
        ".".join(final_python_tags),
        ".".join(final_abi_tags),
        ".".join(final_plat_tags),
    ]
    if build:
        final_tags.insert(1, build)

    final_wheel_name = "-".join(final_tags) + ".whl"

    # Only rewrite the archive when the name (and therefore the tags) changed
    if original_wheel_name != final_wheel_name:
        del info["Tag"], info["Build"]
        for a, b, c in itertools.product(
            final_python_tags, final_abi_tags, final_plat_tags
        ):
            info["Tag"] = f"{a}-{b}-{c}"
        if build:
            info["Build"] = build

        original_wheel_path = os.path.join(
            os.path.dirname(f.filename), original_wheel_name
        )
        final_wheel_path = os.path.join(os.path.dirname(f.filename), final_wheel_name)

        with WheelFile(original_wheel_path, "r") as fin, WheelFile(
            final_wheel_path, "w"
        ) as fout:
            fout.comment = fin.comment  # preserve the comment
            for item in fin.infolist():
                # Skip directory entries; RECORD is regenerated on write
                if item.is_dir():
                    continue
                if item.filename == f.dist_info_path + "/RECORD":
                    continue
                if item.filename == f.dist_info_path + "/WHEEL":
                    fout.writestr(item, info.as_bytes())
                else:
                    fout.writestr(item, fin.read(item))

        if remove:
            os.remove(original_wheel_path)

    return final_wheel_name
diff --git a/src/wheel/cli/unpack.py b/src/wheel/cli/unpack.py
new file mode 100644 (file)
index 0000000..d48840e
--- /dev/null
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+from ..wheelfile import WheelFile
+
+
def unpack(path: str, dest: str = ".") -> None:
    """Unpack a wheel.

    Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}
    is the package name and {ver} its version.

    :param path: The path to the wheel.
    :param dest: Destination directory (default to current directory).
    """
    with WheelFile(path) as wf:
        target = Path(dest) / wf.parsed_filename.group("namever")
        print(f"Unpacking to: {target}...", end="", flush=True)
        for member in wf.filelist:
            wf.extract(member, target)

            # Set permissions to the same values as they were set in the
            # archive; ZipFile.extract does not restore them due to
            # https://github.com/python/cpython/issues/59999
            mode = member.external_attr >> 16 & 0o777
            (target / member.filename).chmod(mode)

    print("OK")
diff --git a/src/wheel/macosx_libfile.py b/src/wheel/macosx_libfile.py
new file mode 100644 (file)
index 0000000..3d19984
--- /dev/null
@@ -0,0 +1,471 @@
+"""
+This module contains function to analyse dynamic library
+headers to extract system information
+
+Currently only for MacOSX
+
+Library file on macosx system starts with Mach-O or Fat field.
+This can be distinguish by first 32 bites and it is called magic number.
+Proper value of magic number is with suffix _MAGIC. Suffix _CIGAM means
+reversed bytes order.
+Both fields can occur in two types: 32 and 64 bytes.
+
+FAT field inform that this library contains few version of library
+(typically for different types version). It contains
+information where Mach-O headers starts.
+
+Each section started with Mach-O header contains one library
+(So if file starts with this field it contains only one version).
+
+After filed Mach-O there are section fields.
+Each of them starts with two fields:
+cmd - magic number for this command
+cmdsize - total size occupied by this section information.
+
+In this case only sections LC_VERSION_MIN_MACOSX (for macosx 10.13 and earlier)
+and LC_BUILD_VERSION (for macosx 10.14 and newer) are interesting,
+because them contains information about minimal system version.
+
+Important remarks:
+- For fat files this implementation looks for maximum number version.
+  It not check if it is 32 or 64 and do not compare it with currently built package.
+  So it is possible to false report higher version that needed.
+- All structures signatures are taken form macosx header files.
+- I think that binary format will be more stable than `otool` output.
+  and if apple introduce some changes both implementation will need to be updated.
+- The system compile will set the deployment target no lower than
+  11.0 for arm64 builds. For "Universal 2" builds use the x86_64 deployment
+  target when the arm64 target is 11.0.
+"""
+
+from __future__ import annotations
+
+import ctypes
+import os
+import sys
+
+"""here the needed const and struct from mach-o header files"""
+
+FAT_MAGIC = 0xCAFEBABE
+FAT_CIGAM = 0xBEBAFECA
+FAT_MAGIC_64 = 0xCAFEBABF
+FAT_CIGAM_64 = 0xBFBAFECA
+MH_MAGIC = 0xFEEDFACE
+MH_CIGAM = 0xCEFAEDFE
+MH_MAGIC_64 = 0xFEEDFACF
+MH_CIGAM_64 = 0xCFFAEDFE
+
+LC_VERSION_MIN_MACOSX = 0x24
+LC_BUILD_VERSION = 0x32
+
+CPU_TYPE_ARM64 = 0x0100000C
+
+mach_header_fields = [
+    ("magic", ctypes.c_uint32),
+    ("cputype", ctypes.c_int),
+    ("cpusubtype", ctypes.c_int),
+    ("filetype", ctypes.c_uint32),
+    ("ncmds", ctypes.c_uint32),
+    ("sizeofcmds", ctypes.c_uint32),
+    ("flags", ctypes.c_uint32),
+]
+"""
+struct mach_header {
+    uint32_t   magic;          /* mach magic number identifier */
+    cpu_type_t cputype;        /* cpu specifier */
+    cpu_subtype_t      cpusubtype;     /* machine specifier */
+    uint32_t   filetype;       /* type of file */
+    uint32_t   ncmds;          /* number of load commands */
+    uint32_t   sizeofcmds;     /* the size of all the load commands */
+    uint32_t   flags;          /* flags */
+};
+typedef integer_t cpu_type_t;
+typedef integer_t cpu_subtype_t;
+"""
+
+mach_header_fields_64 = mach_header_fields + [("reserved", ctypes.c_uint32)]
+"""
+struct mach_header_64 {
+    uint32_t   magic;          /* mach magic number identifier */
+    cpu_type_t cputype;        /* cpu specifier */
+    cpu_subtype_t      cpusubtype;     /* machine specifier */
+    uint32_t   filetype;       /* type of file */
+    uint32_t   ncmds;          /* number of load commands */
+    uint32_t   sizeofcmds;     /* the size of all the load commands */
+    uint32_t   flags;          /* flags */
+    uint32_t   reserved;       /* reserved */
+};
+"""
+
+fat_header_fields = [("magic", ctypes.c_uint32), ("nfat_arch", ctypes.c_uint32)]
+"""
+struct fat_header {
+    uint32_t   magic;          /* FAT_MAGIC or FAT_MAGIC_64 */
+    uint32_t   nfat_arch;      /* number of structs that follow */
+};
+"""
+
+fat_arch_fields = [
+    ("cputype", ctypes.c_int),
+    ("cpusubtype", ctypes.c_int),
+    ("offset", ctypes.c_uint32),
+    ("size", ctypes.c_uint32),
+    ("align", ctypes.c_uint32),
+]
+"""
+struct fat_arch {
+    cpu_type_t cputype;        /* cpu specifier (int) */
+    cpu_subtype_t      cpusubtype;     /* machine specifier (int) */
+    uint32_t   offset;         /* file offset to this object file */
+    uint32_t   size;           /* size of this object file */
+    uint32_t   align;          /* alignment as a power of 2 */
+};
+"""
+
+fat_arch_64_fields = [
+    ("cputype", ctypes.c_int),
+    ("cpusubtype", ctypes.c_int),
+    ("offset", ctypes.c_uint64),
+    ("size", ctypes.c_uint64),
+    ("align", ctypes.c_uint32),
+    ("reserved", ctypes.c_uint32),
+]
+"""
+struct fat_arch_64 {
+    cpu_type_t cputype;        /* cpu specifier (int) */
+    cpu_subtype_t      cpusubtype;     /* machine specifier (int) */
+    uint64_t   offset;         /* file offset to this object file */
+    uint64_t   size;           /* size of this object file */
+    uint32_t   align;          /* alignment as a power of 2 */
+    uint32_t   reserved;       /* reserved */
+};
+"""
+
+segment_base_fields = [("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32)]
+"""base for reading segment info"""
+
+segment_command_fields = [
+    ("cmd", ctypes.c_uint32),
+    ("cmdsize", ctypes.c_uint32),
+    ("segname", ctypes.c_char * 16),
+    ("vmaddr", ctypes.c_uint32),
+    ("vmsize", ctypes.c_uint32),
+    ("fileoff", ctypes.c_uint32),
+    ("filesize", ctypes.c_uint32),
+    ("maxprot", ctypes.c_int),
+    ("initprot", ctypes.c_int),
+    ("nsects", ctypes.c_uint32),
+    ("flags", ctypes.c_uint32),
+]
+"""
+struct segment_command { /* for 32-bit architectures */
+    uint32_t   cmd;            /* LC_SEGMENT */
+    uint32_t   cmdsize;        /* includes sizeof section structs */
+    char               segname[16];    /* segment name */
+    uint32_t   vmaddr;         /* memory address of this segment */
+    uint32_t   vmsize;         /* memory size of this segment */
+    uint32_t   fileoff;        /* file offset of this segment */
+    uint32_t   filesize;       /* amount to map from the file */
+    vm_prot_t  maxprot;        /* maximum VM protection */
+    vm_prot_t  initprot;       /* initial VM protection */
+    uint32_t   nsects;         /* number of sections in segment */
+    uint32_t   flags;          /* flags */
+};
+typedef int vm_prot_t;
+"""
+
+segment_command_fields_64 = [
+    ("cmd", ctypes.c_uint32),
+    ("cmdsize", ctypes.c_uint32),
+    ("segname", ctypes.c_char * 16),
+    ("vmaddr", ctypes.c_uint64),
+    ("vmsize", ctypes.c_uint64),
+    ("fileoff", ctypes.c_uint64),
+    ("filesize", ctypes.c_uint64),
+    ("maxprot", ctypes.c_int),
+    ("initprot", ctypes.c_int),
+    ("nsects", ctypes.c_uint32),
+    ("flags", ctypes.c_uint32),
+]
+"""
+struct segment_command_64 { /* for 64-bit architectures */
+    uint32_t   cmd;            /* LC_SEGMENT_64 */
+    uint32_t   cmdsize;        /* includes sizeof section_64 structs */
+    char               segname[16];    /* segment name */
+    uint64_t   vmaddr;         /* memory address of this segment */
+    uint64_t   vmsize;         /* memory size of this segment */
+    uint64_t   fileoff;        /* file offset of this segment */
+    uint64_t   filesize;       /* amount to map from the file */
+    vm_prot_t  maxprot;        /* maximum VM protection */
+    vm_prot_t  initprot;       /* initial VM protection */
+    uint32_t   nsects;         /* number of sections in segment */
+    uint32_t   flags;          /* flags */
+};
+"""
+
+version_min_command_fields = segment_base_fields + [
+    ("version", ctypes.c_uint32),
+    ("sdk", ctypes.c_uint32),
+]
+"""
+struct version_min_command {
+    uint32_t   cmd;            /* LC_VERSION_MIN_MACOSX or
+                               LC_VERSION_MIN_IPHONEOS or
+                               LC_VERSION_MIN_WATCHOS or
+                               LC_VERSION_MIN_TVOS */
+    uint32_t   cmdsize;        /* sizeof(struct min_version_command) */
+    uint32_t   version;        /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+    uint32_t   sdk;            /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+};
+"""
+
+build_version_command_fields = segment_base_fields + [
+    ("platform", ctypes.c_uint32),
+    ("minos", ctypes.c_uint32),
+    ("sdk", ctypes.c_uint32),
+    ("ntools", ctypes.c_uint32),
+]
+"""
+struct build_version_command {
+    uint32_t   cmd;            /* LC_BUILD_VERSION */
+    uint32_t   cmdsize;        /* sizeof(struct build_version_command) plus */
+                                /* ntools * sizeof(struct build_tool_version) */
+    uint32_t   platform;       /* platform */
+    uint32_t   minos;          /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+    uint32_t   sdk;            /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+    uint32_t   ntools;         /* number of tool entries following this */
+};
+"""
+
+
+def swap32(x):
+    return (
+        ((x << 24) & 0xFF000000)
+        | ((x << 8) & 0x00FF0000)
+        | ((x >> 8) & 0x0000FF00)
+        | ((x >> 24) & 0x000000FF)
+    )
+
+
+def get_base_class_and_magic_number(lib_file, seek=None):
+    if seek is None:
+        seek = lib_file.tell()
+    else:
+        lib_file.seek(seek)
+    magic_number = ctypes.c_uint32.from_buffer_copy(
+        lib_file.read(ctypes.sizeof(ctypes.c_uint32))
+    ).value
+
+    # Handle wrong byte order
+    if magic_number in [FAT_CIGAM, FAT_CIGAM_64, MH_CIGAM, MH_CIGAM_64]:
+        if sys.byteorder == "little":
+            BaseClass = ctypes.BigEndianStructure
+        else:
+            BaseClass = ctypes.LittleEndianStructure
+
+        magic_number = swap32(magic_number)
+    else:
+        BaseClass = ctypes.Structure
+
+    lib_file.seek(seek)
+    return BaseClass, magic_number
+
+
+def read_data(struct_class, lib_file):
+    return struct_class.from_buffer_copy(lib_file.read(ctypes.sizeof(struct_class)))
+
+
+def extract_macosx_min_system_version(path_to_lib):
+    with open(path_to_lib, "rb") as lib_file:
+        BaseClass, magic_number = get_base_class_and_magic_number(lib_file, 0)
+        if magic_number not in [FAT_MAGIC, FAT_MAGIC_64, MH_MAGIC, MH_MAGIC_64]:
+            return
+
+        if magic_number in [FAT_MAGIC, FAT_CIGAM_64]:
+
+            class FatHeader(BaseClass):
+                _fields_ = fat_header_fields
+
+            fat_header = read_data(FatHeader, lib_file)
+            if magic_number == FAT_MAGIC:
+
+                class FatArch(BaseClass):
+                    _fields_ = fat_arch_fields
+
+            else:
+
+                class FatArch(BaseClass):
+                    _fields_ = fat_arch_64_fields
+
+            fat_arch_list = [
+                read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)
+            ]
+
+            versions_list = []
+            for el in fat_arch_list:
+                try:
+                    version = read_mach_header(lib_file, el.offset)
+                    if version is not None:
+                        if el.cputype == CPU_TYPE_ARM64 and len(fat_arch_list) != 1:
+                            # Xcode will not set the deployment target below 11.0.0
+                            # for the arm64 architecture. Ignore the arm64 deployment
+                            # in fat binaries when the target is 11.0.0, that way
+                            # the other architectures can select a lower deployment
+                            # target.
+                            # This is safe because there is no arm64 variant for
+                            # macOS 10.15 or earlier.
+                            if version == (11, 0, 0):
+                                continue
+                        versions_list.append(version)
+                except ValueError:
+                    pass
+
+            if len(versions_list) > 0:
+                return max(versions_list)
+            else:
+                return None
+
+        else:
+            try:
+                return read_mach_header(lib_file, 0)
+            except ValueError:
+                """when some error during read library files"""
+                return None
+
+
+def read_mach_header(lib_file, seek=None):
+    """
+    This funcition parse mach-O header and extract
+    information about minimal system version
+
+    :param lib_file: reference to opened library file with pointer
+    """
+    if seek is not None:
+        lib_file.seek(seek)
+    base_class, magic_number = get_base_class_and_magic_number(lib_file)
+    arch = "32" if magic_number == MH_MAGIC else "64"
+
+    class SegmentBase(base_class):
+        _fields_ = segment_base_fields
+
+    if arch == "32":
+
+        class MachHeader(base_class):
+            _fields_ = mach_header_fields
+
+    else:
+
+        class MachHeader(base_class):
+            _fields_ = mach_header_fields_64
+
+    mach_header = read_data(MachHeader, lib_file)
+    for _i in range(mach_header.ncmds):
+        pos = lib_file.tell()
+        segment_base = read_data(SegmentBase, lib_file)
+        lib_file.seek(pos)
+        if segment_base.cmd == LC_VERSION_MIN_MACOSX:
+
+            class VersionMinCommand(base_class):
+                _fields_ = version_min_command_fields
+
+            version_info = read_data(VersionMinCommand, lib_file)
+            return parse_version(version_info.version)
+        elif segment_base.cmd == LC_BUILD_VERSION:
+
+            class VersionBuild(base_class):
+                _fields_ = build_version_command_fields
+
+            version_info = read_data(VersionBuild, lib_file)
+            return parse_version(version_info.minos)
+        else:
+            lib_file.seek(pos + segment_base.cmdsize)
+            continue
+
+
+def parse_version(version):
+    x = (version & 0xFFFF0000) >> 16
+    y = (version & 0x0000FF00) >> 8
+    z = version & 0x000000FF
+    return x, y, z
+
+
+def calculate_macosx_platform_tag(archive_root, platform_tag):
+    """
+    Calculate proper macosx platform tag basing on files which are included to wheel
+
+    Example platform tag `macosx-10.14-x86_64`
+    """
+    prefix, base_version, suffix = platform_tag.split("-")
+    base_version = tuple(int(x) for x in base_version.split("."))
+    base_version = base_version[:2]
+    if base_version[0] > 10:
+        base_version = (base_version[0], 0)
+    assert len(base_version) == 2
+    if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
+        deploy_target = tuple(
+            int(x) for x in os.environ["MACOSX_DEPLOYMENT_TARGET"].split(".")
+        )
+        deploy_target = deploy_target[:2]
+        if deploy_target[0] > 10:
+            deploy_target = (deploy_target[0], 0)
+        if deploy_target < base_version:
+            sys.stderr.write(
+                "[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than "
+                "the version on which the Python interpreter was compiled ({}), and "
+                "will be ignored.\n".format(
+                    ".".join(str(x) for x in deploy_target),
+                    ".".join(str(x) for x in base_version),
+                )
+            )
+        else:
+            base_version = deploy_target
+
+    assert len(base_version) == 2
+    start_version = base_version
+    versions_dict = {}
+    for dirpath, _dirnames, filenames in os.walk(archive_root):
+        for filename in filenames:
+            if filename.endswith(".dylib") or filename.endswith(".so"):
+                lib_path = os.path.join(dirpath, filename)
+                min_ver = extract_macosx_min_system_version(lib_path)
+                if min_ver is not None:
+                    min_ver = min_ver[0:2]
+                    if min_ver[0] > 10:
+                        min_ver = (min_ver[0], 0)
+                    versions_dict[lib_path] = min_ver
+
+    if len(versions_dict) > 0:
+        base_version = max(base_version, max(versions_dict.values()))
+
+    # macosx platform tag do not support minor bugfix release
+    fin_base_version = "_".join([str(x) for x in base_version])
+    if start_version < base_version:
+        problematic_files = [k for k, v in versions_dict.items() if v > start_version]
+        problematic_files = "\n".join(problematic_files)
+        if len(problematic_files) == 1:
+            files_form = "this file"
+        else:
+            files_form = "these files"
+        error_message = (
+            "[WARNING] This wheel needs a higher macOS version than {}  "
+            "To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least "
+            + fin_base_version
+            + " or recreate "
+            + files_form
+            + " with lower "
+            "MACOSX_DEPLOYMENT_TARGET:  \n" + problematic_files
+        )
+
+        if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
+            error_message = error_message.format(
+                "is set in MACOSX_DEPLOYMENT_TARGET variable."
+            )
+        else:
+            error_message = error_message.format(
+                "the version your Python interpreter is compiled against."
+            )
+
+        sys.stderr.write(error_message)
+
+    platform_tag = prefix + "_" + fin_base_version + "_" + suffix
+    return platform_tag
diff --git a/src/wheel/metadata.py b/src/wheel/metadata.py
new file mode 100644 (file)
index 0000000..ddcb90e
--- /dev/null
@@ -0,0 +1,179 @@
+"""
+Tools for converting old- to new-style metadata.
+"""
+from __future__ import annotations
+
+import functools
+import itertools
+import os.path
+import re
+import textwrap
+from email.message import Message
+from email.parser import Parser
+from typing import Iterator
+
+from .vendored.packaging.requirements import Requirement
+
+
+def _nonblank(str):
+    return str and not str.startswith("#")
+
+
+@functools.singledispatch
+def yield_lines(iterable):
+    r"""
+    Yield valid lines of a string or iterable.
+    >>> list(yield_lines(''))
+    []
+    >>> list(yield_lines(['foo', 'bar']))
+    ['foo', 'bar']
+    >>> list(yield_lines('foo\nbar'))
+    ['foo', 'bar']
+    >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
+    ['foo', 'baz #comment']
+    >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
+    ['foo', 'bar', 'baz', 'bing']
+    """
+    return itertools.chain.from_iterable(map(yield_lines, iterable))
+
+
+@yield_lines.register(str)
+def _(text):
+    return filter(_nonblank, map(str.strip, text.splitlines()))
+
+
+def split_sections(s):
+    """Split a string or iterable thereof into (section, content) pairs
+    Each ``section`` is a stripped version of the section header ("[section]")
+    and each ``content`` is a list of stripped lines excluding blank lines and
+    comment-only lines.  If there are any such lines before the first section
+    header, they're returned in a first ``section`` of ``None``.
+    """
+    section = None
+    content = []
+    for line in yield_lines(s):
+        if line.startswith("["):
+            if line.endswith("]"):
+                if section or content:
+                    yield section, content
+                section = line[1:-1].strip()
+                content = []
+            else:
+                raise ValueError("Invalid section heading", line)
+        else:
+            content.append(line)
+
+    # wrap up last segment
+    yield section, content
+
+
+def safe_extra(extra):
+    """Convert an arbitrary string to a standard 'extra' name
+    Any runs of non-alphanumeric characters are replaced with a single '_',
+    and the result is always lowercased.
+    """
+    return re.sub("[^A-Za-z0-9.-]+", "_", extra).lower()
+
+
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub("[^A-Za-z0-9.]+", "-", name)
+
+
+def requires_to_requires_dist(requirement: Requirement) -> str:
+    """Return the version specifier for a requirement in PEP 345/566 fashion."""
+    if getattr(requirement, "url", None):
+        return " @ " + requirement.url
+
+    requires_dist = []
+    for spec in requirement.specifier:
+        requires_dist.append(spec.operator + spec.version)
+
+    if requires_dist:
+        return " " + ",".join(sorted(requires_dist))
+    else:
+        return ""
+
+
+def convert_requirements(requirements: list[str]) -> Iterator[str]:
+    """Yield Requires-Dist: strings for parsed requirements strings."""
+    for req in requirements:
+        parsed_requirement = Requirement(req)
+        spec = requires_to_requires_dist(parsed_requirement)
+        extras = ",".join(sorted(safe_extra(e) for e in parsed_requirement.extras))
+        if extras:
+            extras = f"[{extras}]"
+
+        yield safe_name(parsed_requirement.name) + extras + spec
+
+
+def generate_requirements(
+    extras_require: dict[str, list[str]]
+) -> Iterator[tuple[str, str]]:
+    """
+    Convert requirements from a setup()-style dictionary to
+    ('Requires-Dist', 'requirement') and ('Provides-Extra', 'extra') tuples.
+
+    extras_require is a dictionary of {extra: [requirements]} as passed to setup(),
+    using the empty extra {'': [requirements]} to hold install_requires.
+    """
+    for extra, depends in extras_require.items():
+        condition = ""
+        extra = extra or ""
+        if ":" in extra:  # setuptools extra:condition syntax
+            extra, condition = extra.split(":", 1)
+
+        extra = safe_extra(extra)
+        if extra:
+            yield "Provides-Extra", extra
+            if condition:
+                condition = "(" + condition + ") and "
+            condition += "extra == '%s'" % extra
+
+        if condition:
+            condition = " ; " + condition
+
+        for new_req in convert_requirements(depends):
+            yield "Requires-Dist", new_req + condition
+
+
+def pkginfo_to_metadata(egg_info_path: str, pkginfo_path: str) -> Message:
+    """
+    Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format
+    """
+    with open(pkginfo_path, encoding="utf-8") as headers:
+        pkg_info = Parser().parse(headers)
+
+    pkg_info.replace_header("Metadata-Version", "2.1")
+    # Those will be regenerated from `requires.txt`.
+    del pkg_info["Provides-Extra"]
+    del pkg_info["Requires-Dist"]
+    requires_path = os.path.join(egg_info_path, "requires.txt")
+    if os.path.exists(requires_path):
+        with open(requires_path, encoding="utf-8") as requires_file:
+            requires = requires_file.read()
+
+        parsed_requirements = sorted(split_sections(requires), key=lambda x: x[0] or "")
+        for extra, reqs in parsed_requirements:
+            for key, value in generate_requirements({extra: reqs}):
+                if (key, value) not in pkg_info.items():
+                    pkg_info[key] = value
+
+    description = pkg_info["Description"]
+    if description:
+        description_lines = pkg_info["Description"].splitlines()
+        dedented_description = "\n".join(
+            # if the first line of long_description is blank,
+            # the first line here will be indented.
+            (
+                description_lines[0].lstrip(),
+                textwrap.dedent("\n".join(description_lines[1:])),
+                "\n",
+            )
+        )
+        pkg_info.set_payload(dedented_description)
+        del pkg_info["Description"]
+
+    return pkg_info
diff --git a/src/wheel/util.py b/src/wheel/util.py
new file mode 100644 (file)
index 0000000..d98d98c
--- /dev/null
@@ -0,0 +1,26 @@
+from __future__ import annotations
+
+import base64
+import logging
+
+log = logging.getLogger("wheel")
+
+# ensure Python logging is configured
+try:
+    __import__("setuptools.logging")
+except ImportError:
+    # setuptools < ??
+    from . import _setuptools_logging
+
+    _setuptools_logging.configure()
+
+
+def urlsafe_b64encode(data: bytes) -> bytes:
+    """urlsafe_b64encode without padding"""
+    return base64.urlsafe_b64encode(data).rstrip(b"=")
+
+
+def urlsafe_b64decode(data: bytes) -> bytes:
+    """urlsafe_b64decode without padding"""
+    pad = b"=" * (4 - (len(data) & 3))
+    return base64.urlsafe_b64decode(data + pad)
diff --git a/src/wheel/vendored/__init__.py b/src/wheel/vendored/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/src/wheel/vendored/packaging/__init__.py b/src/wheel/vendored/packaging/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/src/wheel/vendored/packaging/_elffile.py b/src/wheel/vendored/packaging/_elffile.py
new file mode 100644 (file)
index 0000000..6fb19b3
--- /dev/null
@@ -0,0 +1,108 @@
+"""
+ELF file parser.
+
+This provides a class ``ELFFile`` that parses an ELF executable in a similar
+interface to ``ZipFile``. Only the read interface is implemented.
+
+Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+"""
+
+import enum
+import os
+import struct
+from typing import IO, Optional, Tuple
+
+
+class ELFInvalid(ValueError):
+    pass
+
+
+class EIClass(enum.IntEnum):
+    C32 = 1
+    C64 = 2
+
+
+class EIData(enum.IntEnum):
+    Lsb = 1
+    Msb = 2
+
+
+class EMachine(enum.IntEnum):
+    I386 = 3
+    S390 = 22
+    Arm = 40
+    X8664 = 62
+    AArc64 = 183
+
+
+class ELFFile:
+    """
+    Representation of an ELF executable.
+    """
+
+    def __init__(self, f: IO[bytes]) -> None:
+        self._f = f
+
+        try:
+            ident = self._read("16B")
+        except struct.error:
+            raise ELFInvalid("unable to parse identification")
+        magic = bytes(ident[:4])
+        if magic != b"\x7fELF":
+            raise ELFInvalid(f"invalid magic: {magic!r}")
+
+        self.capacity = ident[4]  # Format for program header (bitness).
+        self.encoding = ident[5]  # Data structure encoding (endianness).
+
+        try:
+            # e_fmt: Format for program header.
+            # p_fmt: Format for section header.
+            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+            e_fmt, self._p_fmt, self._p_idx = {
+                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
+                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
+                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
+                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
+            }[(self.capacity, self.encoding)]
+        except KeyError:
+            raise ELFInvalid(
+                f"unrecognized capacity ({self.capacity}) or "
+                f"encoding ({self.encoding})"
+            )
+
+        try:
+            (
+                _,
+                self.machine,  # Architecture type.
+                _,
+                _,
+                self._e_phoff,  # Offset of program header.
+                _,
+                self.flags,  # Processor-specific flags.
+                _,
+                self._e_phentsize,  # Size of section.
+                self._e_phnum,  # Number of sections.
+            ) = self._read(e_fmt)
+        except struct.error as e:
+            raise ELFInvalid("unable to parse machine and section information") from e
+
+    def _read(self, fmt: str) -> Tuple[int, ...]:
+        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
+
+    @property
+    def interpreter(self) -> Optional[str]:
+        """
+        The path recorded in the ``PT_INTERP`` section header.
+        """
+        for index in range(self._e_phnum):
+            self._f.seek(self._e_phoff + self._e_phentsize * index)
+            try:
+                data = self._read(self._p_fmt)
+            except struct.error:
+                continue
+            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
+                continue
+            self._f.seek(data[self._p_idx[1]])
+            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
+        return None
diff --git a/src/wheel/vendored/packaging/_manylinux.py b/src/wheel/vendored/packaging/_manylinux.py
new file mode 100644 (file)
index 0000000..3705d50
--- /dev/null
@@ -0,0 +1,252 @@
+import collections
+import contextlib
+import functools
+import os
+import re
+import sys
+import warnings
+from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
+
+from ._elffile import EIClass, EIData, ELFFile, EMachine
+
# ELF header e_flags values for 32-bit ARM, used to detect the EABI version
# and the hard-float calling convention of the running interpreter.
EF_ARM_ABIMASK = 0xFF000000
EF_ARM_ABI_VER5 = 0x05000000
EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+
# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
# as the type for `path` until then.
@contextlib.contextmanager
def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
    """Context manager yielding an ``ELFFile`` for *path*, or ``None``.

    Any failure to open or parse the file (missing file, wrong path type,
    not an ELF binary) results in ``None`` instead of an exception.
    """
    try:
        with open(path, "rb") as fp:
            yield ELFFile(fp)
    except (OSError, TypeError, ValueError):
        yield None
+
+
def _is_linux_armhf(executable: str) -> bool:
    """Return True when *executable* is a 32-bit LSB ARM EABI5 hard-float ELF.

    The hard-float ABI can be detected from the ELF header of the running
    process: https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    """
    with _parse_elf(executable) as elf:
        if elf is None:
            return False
        if elf.capacity != EIClass.C32 or elf.encoding != EIData.Lsb:
            return False
        if elf.machine != EMachine.Arm:
            return False
        return (
            elf.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
            and elf.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
        )
+
+
def _is_linux_i686(executable: str) -> bool:
    """Return True when *executable* is a 32-bit LSB x86 (i386) ELF binary."""
    with _parse_elf(executable) as elf:
        if elf is None:
            return False
        return (
            elf.capacity == EIClass.C32
            and elf.encoding == EIData.Lsb
            and elf.machine == EMachine.I386
        )
+
+
+def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
+    if "armv7l" in archs:
+        return _is_linux_armhf(executable)
+    if "i686" in archs:
+        return _is_linux_i686(executable)
+    allowed_archs = {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x", "loongarch64"}
+    return any(arch in allowed_archs for arch in archs)
+
+
# If glibc ever changes its major version, we need to know what the last
# minor version was, so we can build the complete list of all versions.
# For now, guess what the highest minor version might be, assume it will
# be 50 for testing. Once this actually happens, update the dictionary
# with the actual value.
# (defaultdict: any major version not listed reports a guessed last minor of 50.)
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
+
+
class _GLibCVersion(NamedTuple):
    """A glibc version as an order-comparable (major, minor) pair."""

    major: int
    minor: int
+
+
+def _glibc_version_string_confstr() -> Optional[str]:
+    """
+    Primary implementation of glibc_version_string using os.confstr.
+    """
+    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+    # to be broken or missing. This strategy is used in the standard library
+    # platform module.
+    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
+    try:
+        # Should be a string like "glibc 2.17".
+        version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
+        assert version_string is not None
+        _, version = version_string.rsplit()
+    except (AssertionError, AttributeError, OSError, ValueError):
+        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+        return None
+    return version
+
+
def _glibc_version_string_ctypes() -> Optional[str]:
    """
    Fallback implementation of glibc_version_string using ctypes.

    Returns None when ctypes is unavailable, the process is not dynamically
    linked, or the loaded libc does not export ``gnu_get_libc_version``.
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    # (with restype=c_char_p, ctypes hands back bytes on Python 3, so decode)
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str
+
+
def _glibc_version_string() -> Optional[str]:
    """Returns glibc version string, or None if not using glibc."""
    # Prefer the cheap confstr probe; fall back to ctypes only when needed.
    version = _glibc_version_string_confstr()
    if not version:
        version = _glibc_version_string_ctypes()
    return version
+
+
+def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
+    """Parse glibc version.
+
+    We use a regexp instead of str.split because we want to discard any
+    random junk that might come after the minor version -- this might happen
+    in patched/forked versions of glibc (e.g. Linaro's version of glibc
+    uses version strings like "2.20-2014.11"). See gh-3588.
+    """
+    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+    if not m:
+        warnings.warn(
+            f"Expected glibc version with 2 components major.minor,"
+            f" got: {version_str}",
+            RuntimeWarning,
+        )
+        return -1, -1
+    return int(m.group("major")), int(m.group("minor"))
+
+
@functools.lru_cache()
def _get_glibc_version() -> Tuple[int, int]:
    """Return the running glibc version as (major, minor), or (-1, -1)."""
    raw = _glibc_version_string()
    if raw is None:
        return (-1, -1)
    return _parse_glibc_version(raw)
+
+
+# From PEP 513, PEP 600
def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
    """Decide manylinux compatibility for *arch* at glibc *version*.

    The running glibc must be at least *version*, and an installed
    ``_manylinux`` override module may veto (or confirm) support.
    """
    if _get_glibc_version() < version:
        return False
    # Check for presence of the optional _manylinux override module.
    try:
        import _manylinux  # noqa
    except ImportError:
        return True
    if hasattr(_manylinux, "manylinux_compatible"):
        result = _manylinux.manylinux_compatible(version.major, version.minor, arch)
        if result is not None:
            return bool(result)
        return True
    # Legacy per-tag boolean attributes (PEP 513 / 571 / 599 era modules).
    legacy_attrs = {
        _GLibCVersion(2, 5): "manylinux1_compatible",
        _GLibCVersion(2, 12): "manylinux2010_compatible",
        _GLibCVersion(2, 17): "manylinux2014_compatible",
    }
    attr = legacy_attrs.get(version)
    if attr is not None and hasattr(_manylinux, attr):
        return bool(getattr(_manylinux, attr))
    return True
+
+
# PEP 600 glibc versions and their legacy manylinux tag aliases.
_LEGACY_MANYLINUX_MAP = {
    # CentOS 7 w/ glibc 2.17 (PEP 599)
    (2, 17): "manylinux2014",
    # CentOS 6 w/ glibc 2.12 (PEP 571)
    (2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
    (2, 5): "manylinux1",
}
+
+
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate manylinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be manylinux-compatible.

    :returns: An iterator of compatible manylinux tags.
    """
    if not _have_compatible_abi(sys.executable, archs):
        return
    # Oldest glibc to be supported regardless of architecture is (2, 17).
    too_old_glibc2 = _GLibCVersion(2, 16)
    if set(archs) & {"x86_64", "i686"}:
        # On x86/i686 also oldest glibc to be supported is (2, 5).
        too_old_glibc2 = _GLibCVersion(2, 4)
    current_glibc = _GLibCVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
    #
    # Build a list of maximum glibc versions so that we can
    # output the canonical list of all glibc from current_glibc
    # down to too_old_glibc2, including all intermediary versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
    for arch in archs:
        for glibc_max in glibc_max_list:
            if glibc_max.major == too_old_glibc2.major:
                min_minor = too_old_glibc2.minor
            else:
                # For other glibc major versions oldest supported is (x, 0).
                min_minor = -1
            for glibc_minor in range(glibc_max.minor, min_minor, -1):
                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
                tag = "manylinux_{}_{}".format(*glibc_version)
                # Compute compatibility once per version: it gates both the
                # PEP 600 tag and its legacy manylinux1/2010/2014 alias.
                compatible = _is_compatible(arch, glibc_version)
                if compatible:
                    yield f"{tag}_{arch}"
                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
                legacy_tag = _LEGACY_MANYLINUX_MAP.get(glibc_version)
                if legacy_tag and compatible:
                    yield f"{legacy_tag}_{arch}"
diff --git a/src/wheel/vendored/packaging/_musllinux.py b/src/wheel/vendored/packaging/_musllinux.py
new file mode 100644 (file)
index 0000000..86419df
--- /dev/null
@@ -0,0 +1,83 @@
+"""PEP 656 support.
+
+This module implements logic to detect if the currently running Python is
+linked against musl, and what musl version is used.
+"""
+
+import functools
+import re
+import subprocess
+import sys
+from typing import Iterator, NamedTuple, Optional, Sequence
+
+from ._elffile import ELFFile
+
+
class _MuslVersion(NamedTuple):
    """A musl libc version as an order-comparable (major, minor) pair."""

    major: int
    minor: int
+
+
def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
    """Parse the musl loader's stderr banner into a _MuslVersion.

    Returns None when the banner does not look like musl output or no
    "Version X.Y" line is present.
    """
    lines = [line.strip() for line in output.splitlines() if line.strip()]
    if len(lines) < 2 or not lines[0].startswith("musl"):
        return None
    match = re.match(r"Version (\d+)\.(\d+)", lines[1])
    if match is None:
        return None
    return _MuslVersion(major=int(match.group(1)), minor=int(match.group(2)))
+
+
@functools.lru_cache()
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
    """Detect currently-running musl runtime version.

    This is done by checking the specified executable's dynamic linking
    information, and invoking the loader to parse its output for a version
    string. If the loader is musl, the output would be something like::

        musl libc (x86_64)
        Version 1.2.2
        Dynamic Program Loader

    Returns None when the file cannot be read/parsed or is not musl-linked.
    """
    try:
        with open(executable, "rb") as fp:
            loader = ELFFile(fp).interpreter
    except (OSError, TypeError, ValueError):
        return None
    if loader is None or "musl" not in loader:
        return None
    proc = subprocess.run([loader], stderr=subprocess.PIPE, text=True)
    return _parse_musl_version(proc.stderr)
+
+
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate musllinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be musllinux-compatible.

    :returns: An iterator of compatible musllinux tags.
    """
    sys_musl = _get_musl_version(sys.executable)
    if sys_musl is None:  # Python not dynamically linked against musl.
        return
    for arch in archs:
        # Yield from the newest supported minor down to musllinux_X_0.
        minor = sys_musl.minor
        while minor >= 0:
            yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
            minor -= 1
+
+
if __name__ == "__main__":  # pragma: no cover
    import sysconfig

    plat = sysconfig.get_platform()
    assert plat.startswith("linux-"), "not linux"

    print("plat:", plat)
    print("musl:", _get_musl_version(sys.executable))
    print("tags:", end=" ")
    # platform_tags() takes a *sequence* of architectures; passing the bare
    # string would iterate its characters and emit bogus one-letter-arch tags.
    for t in platform_tags([re.sub(r"[.-]", "_", plat.split("-", 1)[-1])]):
        print(t, end="\n      ")
diff --git a/src/wheel/vendored/packaging/_parser.py b/src/wheel/vendored/packaging/_parser.py
new file mode 100644 (file)
index 0000000..4576981
--- /dev/null
@@ -0,0 +1,359 @@
+"""Handwritten parser of dependency specifiers.
+
+The docstring for each _parse_* function contains EBNF-inspired grammar representing
+the implementation.
+"""
+
+import ast
+from typing import Any, List, NamedTuple, Optional, Tuple, Union
+
+from ._tokenizer import DEFAULT_RULES, Tokenizer
+
+
class Node:
    """Base class for nodes of the parsed requirement/marker tree."""

    def __init__(self, value: str) -> None:
        self.value = value

    def __str__(self) -> str:
        return self.value

    def __repr__(self) -> str:
        return "<{}('{}')>".format(self.__class__.__name__, self)

    def serialize(self) -> str:
        # Subclasses decide how a node is rendered back into marker syntax.
        raise NotImplementedError
+
+
class Variable(Node):
    """A marker environment variable (e.g. ``python_version``)."""

    def serialize(self) -> str:
        return str(self)
+
+
class Value(Node):
    """A literal string value; serialized back with surrounding quotes."""

    def serialize(self) -> str:
        return f'"{self}"'
+
+
class Op(Node):
    """A comparison or boolean operator token."""

    def serialize(self) -> str:
        return str(self)
+
+
MarkerVar = Union[Variable, Value]
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
# The intended (recursive) shapes are:
# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
# mypy does not support recursive type definition
# https://github.com/python/mypy/issues/731
MarkerAtom = Any
MarkerList = List[Any]
+
+
class ParsedRequirement(NamedTuple):
    """Structured result of parsing one PEP 508 dependency specifier."""

    name: str
    url: str
    extras: List[str]
    specifier: str
    marker: Optional[MarkerList]
+
+
+# --------------------------------------------------------------------------------------
+# Recursive descent parser for dependency specifier
+# --------------------------------------------------------------------------------------
def parse_requirement(source: str) -> ParsedRequirement:
    """Parse a PEP 508 dependency specifier string into its components."""
    return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
+
+
def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
    """
    requirement = WS? IDENTIFIER WS? extras WS? requirement_details
    """
    tokenizer.consume("WS")

    name_token = tokenizer.expect(
        "IDENTIFIER", expected="package name at the start of dependency specifier"
    )
    name = name_token.text
    tokenizer.consume("WS")

    extras = _parse_extras(tokenizer)
    tokenizer.consume("WS")

    url, specifier, marker = _parse_requirement_details(tokenizer)
    # Anything left over after the details is a syntax error.
    tokenizer.expect("END", expected="end of dependency specifier")

    return ParsedRequirement(name, url, extras, specifier, marker)
+
+
def _parse_requirement_details(
    tokenizer: Tokenizer,
) -> Tuple[str, str, Optional[MarkerList]]:
    """
    requirement_details = AT URL (WS requirement_marker?)?
                        | specifier WS? (requirement_marker)?

    Returns (url, specifier, marker); unused parts are "" / None.
    """

    specifier = ""
    url = ""
    marker = None

    if tokenizer.check("AT"):
        # Direct-reference form: "name @ https://..." (PEP 508).
        tokenizer.read()
        tokenizer.consume("WS")

        url_start = tokenizer.position
        url = tokenizer.expect("URL", expected="URL after @").text
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        tokenizer.expect("WS", expected="whitespace after URL")

        # The input might end after whitespace.
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer, span_start=url_start, after="URL and whitespace"
        )
    else:
        # Version-specifier form (the specifier itself may be empty).
        specifier_start = tokenizer.position
        specifier = _parse_specifier(tokenizer)
        tokenizer.consume("WS")

        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer,
            span_start=specifier_start,
            after=(
                "version specifier"
                if specifier
                else "name and no valid version specifier"
            ),
        )

    return (url, specifier, marker)
+
+
def _parse_requirement_marker(
    tokenizer: Tokenizer, *, span_start: int, after: str
) -> MarkerList:
    """
    requirement_marker = SEMICOLON marker WS?
    """

    # The marker section must be introduced by ";".
    if not tokenizer.check("SEMICOLON"):
        tokenizer.raise_syntax_error(
            f"Expected end or semicolon (after {after})",
            span_start=span_start,
        )
    tokenizer.read()

    marker = _parse_marker(tokenizer)
    tokenizer.consume("WS")

    return marker
+
+
def _parse_extras(tokenizer: Tokenizer) -> List[str]:
    """
    extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?

    Returns [] when no bracketed extras group is present.
    """
    if not tokenizer.check("LEFT_BRACKET", peek=True):
        return []

    # enclosing_tokens() consumes the brackets and raises a syntax error
    # when the closing bracket is missing.
    with tokenizer.enclosing_tokens(
        "LEFT_BRACKET",
        "RIGHT_BRACKET",
        around="extras",
    ):
        tokenizer.consume("WS")
        extras = _parse_extras_list(tokenizer)
        tokenizer.consume("WS")

    return extras
+
+
def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
    """
    extras_list = identifier (wsp* ',' wsp* identifier)*
    """
    extras: List[str] = []

    if not tokenizer.check("IDENTIFIER"):
        return extras

    extras.append(tokenizer.read().text)

    while True:
        tokenizer.consume("WS")
        if tokenizer.check("IDENTIFIER", peek=True):
            # Two identifiers in a row ("foo bar") means a comma is missing.
            tokenizer.raise_syntax_error("Expected comma between extra names")
        elif not tokenizer.check("COMMA"):
            break

        tokenizer.read()
        tokenizer.consume("WS")

        extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
        extras.append(extra_token.text)

    return extras
+
+
def _parse_specifier(tokenizer: Tokenizer) -> str:
    """
    specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
              | WS? version_many WS?
    """
    # The parentheses are optional: enclosing_tokens() is a no-op when the
    # opening parenthesis is absent.
    with tokenizer.enclosing_tokens(
        "LEFT_PARENTHESIS",
        "RIGHT_PARENTHESIS",
        around="version specifier",
    ):
        tokenizer.consume("WS")
        parsed_specifiers = _parse_version_many(tokenizer)
        tokenizer.consume("WS")

    return parsed_specifiers
+
+
def _parse_version_many(tokenizer: Tokenizer) -> str:
    """
    version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?

    Returns the raw comma-joined specifier text (possibly empty).
    """
    parsed_specifiers = ""
    while tokenizer.check("SPECIFIER"):
        span_start = tokenizer.position
        parsed_specifiers += tokenizer.read().text
        # Reject trailing ".*" / local-version labels after a specifier; the
        # SPECIFIER rule itself only matches them for `==` / `!=`.
        if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                ".* suffix can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position + 1,
            )
        if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                "Local version label can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position,
            )
        tokenizer.consume("WS")
        if not tokenizer.check("COMMA"):
            break
        parsed_specifiers += tokenizer.read().text
        tokenizer.consume("WS")

    return parsed_specifiers
+
+
+# --------------------------------------------------------------------------------------
+# Recursive descent parser for marker expression
+# --------------------------------------------------------------------------------------
def parse_marker(source: str) -> MarkerList:
    """Parse a complete environment-marker expression string."""
    return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
+
+
def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
    """Parse a marker and require that the entire input was consumed."""
    retval = _parse_marker(tokenizer)
    tokenizer.expect("END", expected="end of marker expression")
    return retval
+
+
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
    """
    marker = marker_atom (BOOLOP marker_atom)*
    """
    # Result is a flat list: [atom, boolop_text, atom, ...].
    expression = [_parse_marker_atom(tokenizer)]
    while tokenizer.check("BOOLOP"):
        token = tokenizer.read()
        expr_right = _parse_marker_atom(tokenizer)
        expression.extend((token.text, expr_right))
    return expression
+
+
def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
    """
    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
                | WS? marker_item WS?
    """

    tokenizer.consume("WS")
    if tokenizer.check("LEFT_PARENTHESIS", peek=True):
        # Parenthesized group: parse a full sub-marker between the parens.
        with tokenizer.enclosing_tokens(
            "LEFT_PARENTHESIS",
            "RIGHT_PARENTHESIS",
            around="marker expression",
        ):
            tokenizer.consume("WS")
            marker: MarkerAtom = _parse_marker(tokenizer)
            tokenizer.consume("WS")
    else:
        marker = _parse_marker_item(tokenizer)
    tokenizer.consume("WS")
    return marker
+
+
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
    """
    marker_item = WS? marker_var WS? marker_op WS? marker_var WS?

    Returns a (lhs, op, rhs) triple of parsed nodes.
    """
    tokenizer.consume("WS")
    marker_var_left = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    marker_op = _parse_marker_op(tokenizer)
    tokenizer.consume("WS")
    marker_var_right = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    return (marker_var_left, marker_op, marker_var_right)
+
+
def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
    """
    marker_var = VARIABLE | QUOTED_STRING
    """
    if tokenizer.check("VARIABLE"):
        # Normalize dotted spellings such as "os.name" to "os_name".
        return process_env_var(tokenizer.read().text.replace(".", "_"))
    elif tokenizer.check("QUOTED_STRING"):
        return process_python_str(tokenizer.read().text)
    else:
        tokenizer.raise_syntax_error(
            message="Expected a marker variable or quoted string"
        )
+
+
def process_env_var(env_var: str) -> Variable:
    """Map a marker variable name onto its canonical Variable node."""
    # "python_implementation" is accepted as an alias of
    # "platform_python_implementation".
    if env_var in ("platform_python_implementation", "python_implementation"):
        return Variable("platform_python_implementation")
    return Variable(env_var)
+
+
def process_python_str(python_str: str) -> Value:
    """Evaluate a QUOTED_STRING token into a Value node."""
    # literal_eval strips the quotes and resolves escape sequences safely.
    value = ast.literal_eval(python_str)
    return Value(str(value))
+
+
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
    """
    marker_op = IN | NOT IN | OP
    """
    if tokenizer.check("IN"):
        tokenizer.read()
        return Op("in")
    elif tokenizer.check("NOT"):
        # "not" must be followed by whitespace and then "in".
        tokenizer.read()
        tokenizer.expect("WS", expected="whitespace after 'not'")
        tokenizer.expect("IN", expected="'in' after 'not'")
        return Op("not in")
    elif tokenizer.check("OP"):
        return Op(tokenizer.read().text)
    else:
        return tokenizer.raise_syntax_error(
            "Expected marker operator, one of "
            "<=, <, !=, ==, >=, >, ~=, ===, in, not in"
        )
diff --git a/src/wheel/vendored/packaging/_structures.py b/src/wheel/vendored/packaging/_structures.py
new file mode 100644 (file)
index 0000000..90a6465
--- /dev/null
@@ -0,0 +1,61 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
class InfinityType:
    """A sentinel that compares greater than every other value."""

    def __repr__(self) -> str:
        return "Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __eq__(self, other: object) -> bool:
        # Equal only to other instances of this sentinel type.
        return isinstance(other, self.__class__)

    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity


# Module-level singleton used by version-comparison logic.
Infinity = InfinityType()
+
+
class NegativeInfinityType:
    """A sentinel that compares less than every other value."""

    def __repr__(self) -> str:
        return "-Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __eq__(self, other: object) -> bool:
        # Equal only to other instances of this sentinel type.
        return isinstance(other, self.__class__)

    def __lt__(self, other: object) -> bool:
        return True

    def __le__(self, other: object) -> bool:
        return True

    def __gt__(self, other: object) -> bool:
        return False

    def __ge__(self, other: object) -> bool:
        return False

    def __neg__(self: object) -> InfinityType:
        return Infinity


# Module-level singleton used by version-comparison logic.
NegativeInfinity = NegativeInfinityType()
diff --git a/src/wheel/vendored/packaging/_tokenizer.py b/src/wheel/vendored/packaging/_tokenizer.py
new file mode 100644 (file)
index 0000000..dd0d648
--- /dev/null
@@ -0,0 +1,192 @@
+import contextlib
+import re
+from dataclasses import dataclass
+from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union
+
+from .specifiers import Specifier
+
+
@dataclass
class Token:
    """A single lexed token: rule name, matched text, and source offset."""

    name: str
    text: str
    position: int
+
+
class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: Tuple[int, int],
    ) -> None:
        # Keep message/source/span so callers can re-render the error.
        self.span = span
        self.message = message
        self.source = source
        super().__init__()

    def __str__(self) -> str:
        start, end = self.span
        pointer = " " * start + "~" * (end - start) + "^"
        return "\n    ".join([self.message, self.source, pointer])
+
+
# Token name -> regex. String patterns are compiled by Tokenizer.__init__;
# matching is by name on demand, so dict ordering carries no priority.
DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
    "LEFT_PARENTHESIS": r"\(",
    "RIGHT_PARENTHESIS": r"\)",
    "LEFT_BRACKET": r"\[",
    "RIGHT_BRACKET": r"\]",
    "SEMICOLON": r";",
    "COMMA": r",",
    "QUOTED_STRING": re.compile(
        r"""
            (
                ('[^']*')
                |
                ("[^"]*")
            )
        """,
        re.VERBOSE,
    ),
    "OP": r"(===|==|~=|!=|<=|>=|<|>)",
    "BOOLOP": r"\b(or|and)\b",
    "IN": r"\bin\b",
    "NOT": r"\bnot\b",
    "VARIABLE": re.compile(
        r"""
            \b(
                python_version
                |python_full_version
                |os[._]name
                |sys[._]platform
                |platform_(release|system)
                |platform[._](version|machine|python_implementation)
                |python_implementation
                |implementation_(name|version)
                |extra
            )\b
        """,
        re.VERBOSE,
    ),
    # Version specifiers reuse the operator/version grammar from Specifier.
    "SPECIFIER": re.compile(
        Specifier._operator_regex_str + Specifier._version_regex_str,
        re.VERBOSE | re.IGNORECASE,
    ),
    "AT": r"\@",
    "URL": r"[^ \t]+",
    "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
    "VERSION_PREFIX_TRAIL": r"\.\*",
    "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
    "WS": r"[ \t]+",
    "END": r"$",
}
+
+
class Tokenizer:
    """Context-sensitive token parsing.

    Provides methods to examine the input stream to check whether the next token
    matches.
    """

    def __init__(
        self,
        source: str,
        *,
        rules: "Dict[str, Union[str, re.Pattern[str]]]",
    ) -> None:
        self.source = source
        # Pre-compile every rule; re.compile() passes through patterns that
        # are already compiled.
        self.rules: Dict[str, re.Pattern[str]] = {
            name: re.compile(pattern) for name, pattern in rules.items()
        }
        # Token loaded by a successful non-peek check(), pending a read().
        self.next_token: Optional[Token] = None
        self.position = 0

    def consume(self, name: str) -> None:
        """Move beyond provided token name, if at current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        assert (
            self.next_token is None
        ), f"Cannot check for {name!r}, already have {self.next_token!r}"
        assert name in self.rules, f"Unknown token name: {name!r}"

        expression = self.rules[name]

        match = expression.match(self.source, self.position)
        if match is None:
            return False
        if not peek:
            self.next_token = Token(name, match[0], self.position)
        return True

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        The token is *not* read.
        """
        if not self.check(name):
            raise self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        token = self.next_token
        assert token is not None

        self.position += len(token.text)
        self.next_token = None

        return token

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: Optional[int] = None,
        span_end: Optional[int] = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position."""
        # Default the span to a zero-width caret at the current position.
        span = (
            self.position if span_start is None else span_start,
            self.position if span_end is None else span_end,
        )
        raise ParserSyntaxError(
            message,
            source=self.source,
            span=span,
        )

    @contextlib.contextmanager
    def enclosing_tokens(
        self, open_token: str, close_token: str, *, around: str
    ) -> Iterator[None]:
        """Require a balanced open/close token pair around the enclosed parse.

        When the opening token is absent, the pair is treated as optional and
        no closing token is demanded.
        """
        if self.check(open_token):
            open_position = self.position
            self.read()
        else:
            open_position = None

        yield

        if open_position is None:
            return

        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected matching {close_token} for {open_token}, after {around}",
                span_start=open_position,
            )

        self.read()
diff --git a/src/wheel/vendored/packaging/markers.py b/src/wheel/vendored/packaging/markers.py
new file mode 100644 (file)
index 0000000..8b98fca
--- /dev/null
@@ -0,0 +1,252 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import operator
+import os
+import platform
+import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
+from ._parser import (
+    MarkerAtom,
+    MarkerList,
+    Op,
+    Value,
+    Variable,
+    parse_marker as _parse_marker,
+)
+from ._tokenizer import ParserSyntaxError
+from .specifiers import InvalidSpecifier, Specifier
+from .utils import canonicalize_name
+
+__all__ = [
+    "InvalidMarker",
+    "UndefinedComparison",
+    "UndefinedEnvironmentName",
+    "Marker",
+    "default_environment",
+]
+
+Operator = Callable[[str, str], bool]
+
+
+class InvalidMarker(ValueError):
+    """
+    An invalid marker was found, users should refer to PEP 508.
+
+    Raised by ``Marker`` when the marker string fails to parse.
+    """
+
+
+class UndefinedComparison(ValueError):
+    """
+    An invalid operation was attempted on a value that doesn't support it.
+
+    Raised by ``_eval_op`` when no comparison is defined for an operator.
+    """
+
+
+class UndefinedEnvironmentName(ValueError):
+    """
+    A name was attempted to be used that does not exist inside of the
+    environment.
+    """
+
+
+def _normalize_extra_values(results: Any) -> Any:
+    """
+    Normalize extra values.
+
+    If the first parsed item is an ``extra == "..."`` comparison (in either
+    operand order), replace the compared value with its canonicalized form
+    (via ``canonicalize_name``). The result list is mutated in place and
+    returned.
+    """
+    if isinstance(results[0], tuple):
+        lhs, op, rhs = results[0]
+        if isinstance(lhs, Variable) and lhs.value == "extra":
+            normalized_extra = canonicalize_name(rhs.value)
+            rhs = Value(normalized_extra)
+        elif isinstance(rhs, Variable) and rhs.value == "extra":
+            normalized_extra = canonicalize_name(lhs.value)
+            lhs = Value(normalized_extra)
+        results[0] = lhs, op, rhs
+    return results
+
+
+def _format_marker(
+    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
+) -> str:
+    """Serialize a parsed marker structure back into its string form.
+
+    ``first`` is True only at the outermost level, where the surrounding
+    parentheses are suppressed.
+    """
+
+    assert isinstance(marker, (list, tuple, str))
+
+    # Sometimes we have a structure like [[...]] which is a single item list
+    # where the single item is itself its own list. In that case we want to skip
+    # the rest of this function so that we don't get extraneous () on the
+    # outside.
+    if (
+        isinstance(marker, list)
+        and len(marker) == 1
+        and isinstance(marker[0], (list, tuple))
+    ):
+        return _format_marker(marker[0])
+
+    if isinstance(marker, list):
+        inner = (_format_marker(m, first=False) for m in marker)
+        if first:
+            return " ".join(inner)
+        else:
+            return "(" + " ".join(inner) + ")"
+    elif isinstance(marker, tuple):
+        # A (Variable, Op, Value) comparison: serialize each node.
+        return " ".join([m.serialize() for m in marker])
+    else:
+        # A bare "and" / "or" connective.
+        return marker
+
+
+# Fallback string comparison operators used by _eval_op when the right-hand
+# side is not a valid PEP 440 version specifier.
+_operators: Dict[str, Operator] = {
+    "in": lambda lhs, rhs: lhs in rhs,
+    "not in": lambda lhs, rhs: lhs not in rhs,
+    "<": operator.lt,
+    "<=": operator.le,
+    "==": operator.eq,
+    "!=": operator.ne,
+    ">=": operator.ge,
+    ">": operator.gt,
+}
+
+
+def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
+    """Evaluate a single ``lhs <op> rhs`` marker comparison.
+
+    When ``<op> rhs`` forms a valid version specifier, the comparison is
+    delegated to ``Specifier.contains`` (with prereleases allowed); otherwise
+    it falls back to plain string operators from ``_operators``.
+
+    Raises UndefinedComparison when the operator has no string fallback.
+    """
+    try:
+        spec = Specifier("".join([op.serialize(), rhs]))
+    except InvalidSpecifier:
+        pass
+    else:
+        return spec.contains(lhs, prereleases=True)
+
+    oper: Optional[Operator] = _operators.get(op.serialize())
+    if oper is None:
+        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
+
+    return oper(lhs, rhs)
+
+
+def _normalize(*values: str, key: str) -> Tuple[str, ...]:
+    """Normalize marker operand values for comparison under marker ``key``."""
+    # PEP 685 – Comparison of extra names for optional distribution dependencies
+    # https://peps.python.org/pep-0685/
+    # > When comparing extra names, tools MUST normalize the names being
+    # > compared using the semantics outlined in PEP 503 for names
+    if key == "extra":
+        return tuple(canonicalize_name(v) for v in values)
+
+    # other environment markers don't have such standards
+    return values
+
+
+def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
+    """Recursively evaluate a parsed marker list against ``environment``.
+
+    ``groups`` collects runs of and-ed results; each "or" starts a new group,
+    so the final answer is the OR over groups of the AND within each group.
+    """
+    groups: List[List[bool]] = [[]]
+
+    for marker in markers:
+        assert isinstance(marker, (list, tuple, str))
+
+        if isinstance(marker, list):
+            # Parenthesized sub-expression: evaluate recursively.
+            groups[-1].append(_evaluate_markers(marker, environment))
+        elif isinstance(marker, tuple):
+            lhs, op, rhs = marker
+
+            # Exactly one side is an environment Variable; resolve it from the
+            # environment and take the other side's literal value.
+            if isinstance(lhs, Variable):
+                environment_key = lhs.value
+                lhs_value = environment[environment_key]
+                rhs_value = rhs.value
+            else:
+                lhs_value = lhs.value
+                environment_key = rhs.value
+                rhs_value = environment[environment_key]
+
+            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
+            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+        else:
+            assert marker in ["and", "or"]
+            if marker == "or":
+                groups.append([])
+
+    return any(all(item) for item in groups)
+
+
+def format_full_version(info: "sys._version_info") -> str:
+    """Return ``major.minor.micro``, plus a release-level suffix when not final.
+
+    e.g. ``3.11.2``, or ``3.12.0b1`` for releaselevel "beta", serial 1.
+    """
+    version = "{0.major}.{0.minor}.{0.micro}".format(info)
+    kind = info.releaselevel
+    if kind != "final":
+        version += kind[0] + str(info.serial)
+    return version
+
+
+def default_environment() -> Dict[str, str]:
+    """Return the marker environment describing the running interpreter.
+
+    Keys are the PEP 508 environment marker variable names; values are
+    derived from ``sys``, ``os`` and ``platform``.
+    """
+    iver = format_full_version(sys.implementation.version)
+    implementation_name = sys.implementation.name
+    return {
+        "implementation_name": implementation_name,
+        "implementation_version": iver,
+        "os_name": os.name,
+        "platform_machine": platform.machine(),
+        "platform_release": platform.release(),
+        "platform_system": platform.system(),
+        "platform_version": platform.version(),
+        "python_full_version": platform.python_version(),
+        "platform_python_implementation": platform.python_implementation(),
+        "python_version": ".".join(platform.python_version_tuple()[:2]),
+        "sys_platform": sys.platform,
+    }
+
+
+class Marker:
+    """A parsed environment marker, e.g. ``python_version > "3.6"``.
+
+    Equality and hashing are defined on the serialized string form.
+    """
+
+    def __init__(self, marker: str) -> None:
+        # Note: We create a Marker object without calling this constructor in
+        #       packaging.requirements.Requirement. If any additional logic is
+        #       added here, make sure to mirror/adapt Requirement.
+        try:
+            self._markers = _normalize_extra_values(_parse_marker(marker))
+            # The attribute `_markers` can be described in terms of a recursive type:
+            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
+            #
+            # For example, the following expression:
+            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
+            #
+            # is parsed into:
+            # [
+            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
+            #     'and',
+            #     [
+            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
+            #         'or',
+            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
+            #     ]
+            # ]
+        except ParserSyntaxError as e:
+            raise InvalidMarker(str(e)) from e
+
+    def __str__(self) -> str:
+        return _format_marker(self._markers)
+
+    def __repr__(self) -> str:
+        return f"<Marker('{self}')>"
+
+    def __hash__(self) -> int:
+        # Hash on the serialized form, consistent with __eq__ below.
+        return hash((self.__class__.__name__, str(self)))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Marker):
+            return NotImplemented
+
+        return str(self) == str(other)
+
+    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
+        """Evaluate a marker.
+
+        Return the boolean from evaluating the given marker against the
+        environment. environment is an optional argument to override all or
+        part of the determined environment.
+
+        The environment is determined from the current Python process.
+        """
+        current_environment = default_environment()
+        current_environment["extra"] = ""
+        if environment is not None:
+            current_environment.update(environment)
+            # The API used to allow setting extra to None. We need to handle this
+            # case for backwards compatibility.
+            if current_environment["extra"] is None:
+                current_environment["extra"] = ""
+
+        return _evaluate_markers(self._markers, current_environment)
diff --git a/src/wheel/vendored/packaging/requirements.py b/src/wheel/vendored/packaging/requirements.py
new file mode 100644 (file)
index 0000000..0c00eba
--- /dev/null
@@ -0,0 +1,90 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from typing import Any, Iterator, Optional, Set
+
+from ._parser import parse_requirement as _parse_requirement
+from ._tokenizer import ParserSyntaxError
+from .markers import Marker, _normalize_extra_values
+from .specifiers import SpecifierSet
+from .utils import canonicalize_name
+
+
+class InvalidRequirement(ValueError):
+    """
+    An invalid requirement was found, users should refer to PEP 508.
+
+    Raised by ``Requirement`` when the requirement string fails to parse.
+    """
+
+
+class Requirement:
+    """Parse a requirement.
+
+    Parse a given requirement string into its parts, such as name, specifier,
+    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
+    string.
+    """
+
+    # TODO: Can we test whether something is contained within a requirement?
+    #       If so how do we do that? Do we need to test against the _name_ of
+    #       the thing as well as the version? What about the markers?
+    # TODO: Can we normalize the name and extra name?
+
+    def __init__(self, requirement_string: str) -> None:
+        try:
+            parsed = _parse_requirement(requirement_string)
+        except ParserSyntaxError as e:
+            raise InvalidRequirement(str(e)) from e
+
+        self.name: str = parsed.name
+        self.url: Optional[str] = parsed.url or None
+        self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
+        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
+        self.marker: Optional[Marker] = None
+        if parsed.marker is not None:
+            # Build the Marker without re-parsing: reuse the already-parsed
+            # marker AST (see the note in Marker.__init__).
+            self.marker = Marker.__new__(Marker)
+            self.marker._markers = _normalize_extra_values(parsed.marker)
+
+    def _iter_parts(self, name: str) -> Iterator[str]:
+        """Yield the string pieces of this requirement, using ``name`` as the
+        project name (callers pass either the raw or canonicalized name)."""
+        yield name
+
+        if self.extras:
+            formatted_extras = ",".join(sorted(self.extras))
+            yield f"[{formatted_extras}]"
+
+        if self.specifier:
+            yield str(self.specifier)
+
+        if self.url:
+            yield f"@ {self.url}"
+            if self.marker:
+                # Separator between the URL and a following "; marker" clause.
+                yield " "
+
+        if self.marker:
+            yield f"; {self.marker}"
+
+    def __str__(self) -> str:
+        return "".join(self._iter_parts(self.name))
+
+    def __repr__(self) -> str:
+        return f"<Requirement('{self}')>"
+
+    def __hash__(self) -> int:
+        # Hash on the canonicalized-name form so requirements that compare
+        # equal (see __eq__) also hash equal.
+        return hash(
+            (
+                self.__class__.__name__,
+                *self._iter_parts(canonicalize_name(self.name)),
+            )
+        )
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Requirement):
+            return NotImplemented
+
+        return (
+            canonicalize_name(self.name) == canonicalize_name(other.name)
+            and self.extras == other.extras
+            and self.specifier == other.specifier
+            and self.url == other.url
+            and self.marker == other.marker
+        )
diff --git a/src/wheel/vendored/packaging/specifiers.py b/src/wheel/vendored/packaging/specifiers.py
new file mode 100644 (file)
index 0000000..ba8fe37
--- /dev/null
@@ -0,0 +1,1008 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
+    from packaging.version import Version
+"""
+
+import abc
+import itertools
+import re
+from typing import (
+    Callable,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    Set,
+    Tuple,
+    TypeVar,
+    Union,
+)
+
+from .utils import canonicalize_version
+from .version import Version
+
+UnparsedVersion = Union[Version, str]
+UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
+CallableOperator = Callable[[Version, str], bool]
+
+
+def _coerce_version(version: UnparsedVersion) -> Version:
+    """Return ``version`` as a ``Version``, parsing it if it is a string."""
+    if not isinstance(version, Version):
+        version = Version(version)
+    return version
+
+
+class InvalidSpecifier(ValueError):
+    """
+    Raised when attempting to create a :class:`Specifier` with a specifier
+    string that is invalid.
+
+    >>> Specifier("lolwat")
+    Traceback (most recent call last):
+        ...
+    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
+    """
+
+
+class BaseSpecifier(metaclass=abc.ABCMeta):
+    """Abstract base class defining the interface implemented by
+    :class:`Specifier` (and other Specifier-like objects)."""
+
+    @abc.abstractmethod
+    def __str__(self) -> str:
+        """
+        Returns the str representation of this Specifier-like object. This
+        should be representative of the Specifier itself.
+        """
+
+    @abc.abstractmethod
+    def __hash__(self) -> int:
+        """
+        Returns a hash value for this Specifier-like object.
+        """
+
+    @abc.abstractmethod
+    def __eq__(self, other: object) -> bool:
+        """
+        Returns a boolean representing whether or not the two Specifier-like
+        objects are equal.
+
+        :param other: The other object to check against.
+        """
+
+    @property
+    @abc.abstractmethod
+    def prereleases(self) -> Optional[bool]:
+        """Whether or not pre-releases as a whole are allowed.
+
+        This can be set to either ``True`` or ``False`` to explicitly enable or disable
+        prereleases or it can be set to ``None`` (the default) to use default semantics.
+        """
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        """Setter for :attr:`prereleases`.
+
+        :param value: The value to set.
+        """
+
+    @abc.abstractmethod
+    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
+        """
+        Determines if the given item is contained within this specifier.
+        """
+
+    @abc.abstractmethod
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """
+        Takes an iterable of items and filters them so that only items which
+        are contained within this specifier are allowed in it.
+        """
+
+
+class Specifier(BaseSpecifier):
+    """This class abstracts handling of version specifiers.
+
+    .. tip::
+
+        It is generally not required to instantiate this manually. You should instead
+        prefer to work with :class:`SpecifierSet` instead, which can parse
+        comma-separated version specifiers (which is what package metadata contains).
+    """
+
+    _operator_regex_str = r"""
+        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+        """
+    _version_regex_str = r"""
+        (?P<version>
+            (?:
+                # The identity operators allow for an escape hatch that will
+                # do an exact string match of the version you wish to install.
+                # This will not be parsed by PEP 440 and we cannot determine
+                # any semantic meaning from it. This operator is discouraged
+                # but included entirely as an escape hatch.
+                (?<====)  # Only match for the identity operator
+                \s*
+                [^\s;)]*  # The arbitrary version can be just about anything,
+                          # we match everything except for whitespace, a
+                          # semi-colon for marker support, and a closing paren
+                          # since versions can be enclosed in them.
+            )
+            |
+            (?:
+                # The (non)equality operators allow for wild card and local
+                # versions to be specified so we have to define these two
+                # operators separately to enable that.
+                (?<===|!=)            # Only match for equals and not equals
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+
+                # You cannot use a wild card and a pre-release, post-release, a dev or
+                # local version together so group them with a | and make them optional.
+                (?:
+                    \.\*  # Wild card syntax of .*
+                    |
+                    (?:                                  # pre release
+                        [-_\.]?
+                        (alpha|beta|preview|pre|a|b|c|rc)
+                        [-_\.]?
+                        [0-9]*
+                    )?
+                    (?:                                  # post release
+                        (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                    )?
+                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+                )?
+            )
+            |
+            (?:
+                # The compatible operator requires at least two digits in the
+                # release segment.
+                (?<=~=)               # Only match for the compatible operator
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
+                (?:                   # pre release
+                    [-_\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a sub set of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?<!==|!=|~=)         # We have special cases for these
+                                      # operators so we want to make sure they
+                                      # don't match here.
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+        )
+        """
+
+    _regex = re.compile(
+        r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    _operators = {
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+        """Initialize a Specifier instance.
+
+        :param spec:
+            The string representation of a specifier which will be parsed and
+            normalized before use.
+        :param prereleases:
+            This tells the specifier if it should accept prerelease versions if
+            applicable or not. The default of ``None`` will autodetect it from the
+            given specifiers.
+        :raises InvalidSpecifier:
+            If the given specifier is invalid (i.e. bad syntax).
+        """
+        match = self._regex.search(spec)
+        if not match:
+            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
+
+        # Keep the (operator, version) pair with surrounding whitespace stripped.
+        self._spec: Tuple[str, str] = (
+            match.group("operator").strip(),
+            match.group("version").strip(),
+        )
+
+        # Store whether or not this Specifier should accept prereleases
+        self._prereleases = prereleases
+
+    # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
+    @property  # type: ignore[override]
+    def prereleases(self) -> bool:
+        """Whether this specifier should match pre-release versions."""
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at all of our specifiers and determine if they are inclusive
+        # operators, and if they are if they are including an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "==="]:
+            # The == specifier can include a trailing .*, if it does we
+            # want to remove before parsing.
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+
+            # Parse the version, and if it is a pre-release than this
+            # specifier allows pre-releases.
+            if Version(version).is_prerelease:
+                return True
+
+        return False
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        # Explicit setting always wins over the autodetection above.
+        self._prereleases = value
+
+    @property
+    def operator(self) -> str:
+        """The operator of this specifier.
+
+        >>> Specifier("==1.2.3").operator
+        '=='
+        """
+        # First element of the parsed (operator, version) pair.
+        return self._spec[0]
+
+    @property
+    def version(self) -> str:
+        """The version of this specifier.
+
+        >>> Specifier("==1.2.3").version
+        '1.2.3'
+        """
+        # Second element of the parsed (operator, version) pair.
+        return self._spec[1]
+
+    def __repr__(self) -> str:
+        """A representation of the Specifier that shows all internal state.
+
+        >>> Specifier('>=1.0.0')
+        <Specifier('>=1.0.0')>
+        >>> Specifier('>=1.0.0', prereleases=False)
+        <Specifier('>=1.0.0', prereleases=False)>
+        >>> Specifier('>=1.0.0', prereleases=True)
+        <Specifier('>=1.0.0', prereleases=True)>
+        """
+        # Only show prereleases when it was set explicitly (not autodetected).
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        """A string representation of the Specifier that can be round-tripped.
+
+        >>> str(Specifier('>=1.0.0'))
+        '>=1.0.0'
+        >>> str(Specifier('>=1.0.0', prereleases=False))
+        '>=1.0.0'
+        """
+        return "{}{}".format(*self._spec)
+
+    @property
+    def _canonical_spec(self) -> Tuple[str, str]:
+        """(operator, canonicalized version) pair used by __eq__ and __hash__."""
+        # Trailing zeros are kept for "~=" (strip_trailing_zero is False there);
+        # presumably because they are significant to the compatible-release
+        # prefix — TODO(review): confirm against canonicalize_version docs.
+        canonical_version = canonicalize_version(
+            self._spec[1],
+            strip_trailing_zero=(self._spec[0] != "~="),
+        )
+        return self._spec[0], canonical_version
+
+    def __hash__(self) -> int:
+        # Hash on the canonical form, consistent with __eq__.
+        return hash(self._canonical_spec)
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two Specifier-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
+        True
+        >>> (Specifier("==1.2.3", prereleases=False) ==
+        ...  Specifier("==1.2.3", prereleases=True))
+        True
+        >>> Specifier("==1.2.3") == "==1.2.3"
+        True
+        >>> Specifier("==1.2.3") == Specifier("==1.2.4")
+        False
+        >>> Specifier("==1.2.3") == Specifier("~=1.2.3")
+        False
+        """
+        if isinstance(other, str):
+            # Allow comparing directly against a specifier string.
+            try:
+                other = self.__class__(str(other))
+            except InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+
+        return self._canonical_spec == other._canonical_spec
+
+    def _get_operator(self, op: str) -> CallableOperator:
+        """Return the ``_compare_*`` method implementing operator ``op``."""
+        operator_callable: CallableOperator = getattr(
+            self, f"_compare_{self._operators[op]}"
+        )
+        return operator_callable
+
+    def _compare_compatible(self, prospective: Version, spec: str) -> bool:
+        """Compatible-release operator: ``~=2.2`` behaves as ``>=2.2, ==2.*``."""
+
+        # Compatible releases have an equivalent combination of >= and ==. That
+        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+        # implement this in terms of the other specifiers instead of
+        # implementing it ourselves. The only thing we need to do is construct
+        # the other specifiers.
+
+        # We want everything but the last item in the version, but we want to
+        # ignore suffix segments.
+        prefix = ".".join(
+            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
+        )
+
+        # Add the prefix notation to the end of our string
+        prefix += ".*"
+
+        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+            prospective, prefix
+        )
+
+    def _compare_equal(self, prospective: Version, spec: str) -> bool:
+        """``==`` operator: exact match, or prefix match when spec ends in ``.*``."""
+
+        # We need special logic to handle prefix matching
+        if spec.endswith(".*"):
+            # In the case of prefix matching we want to ignore local segment.
+            normalized_prospective = canonicalize_version(
+                prospective.public, strip_trailing_zero=False
+            )
+            # Get the normalized version string ignoring the trailing .*
+            normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
+            # Split the spec out by dots, and pretend that there is an implicit
+            # dot in between a release segment and a pre-release segment.
+            split_spec = _version_split(normalized_spec)
+
+            # Split the prospective version out by dots, and pretend that there
+            # is an implicit dot in between a release segment and a pre-release
+            # segment.
+            split_prospective = _version_split(normalized_prospective)
+
+            # 0-pad the prospective version before shortening it to get the correct
+            # shortened version.
+            padded_prospective, _ = _pad_version(split_prospective, split_spec)
+
+            # Shorten the prospective version to be the same length as the spec
+            # so that we can determine if the specifier is a prefix of the
+            # prospective version or not.
+            shortened_prospective = padded_prospective[: len(split_spec)]
+
+            return shortened_prospective == split_spec
+        else:
+            # Convert our spec string into a Version
+            spec_version = Version(spec)
+
+            # If the specifier does not have a local segment, then we want to
+            # act as if the prospective version also does not have a local
+            # segment.
+            if not spec_version.local:
+                prospective = Version(prospective.public)
+
+            return prospective == spec_version
+
+    def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
+        """``!=`` operator: the negation of ``==`` (including prefix matching)."""
+        return not self._compare_equal(prospective, spec)
+
+    def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
+        """``<=`` operator, compared on the public (non-local) version."""
+
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) <= Version(spec)
+
+    def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
+        """``>=`` operator, compared on the public (non-local) version."""
+
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) >= Version(spec)
+
+    def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
+        """``<`` operator, excluding pre-releases of the spec version itself."""
+
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is less than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a pre-release version, we do not accept pre-release
+        # versions for the version mentioned in the specifier (e.g. <3.1 should
+        # not match 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # less than the spec version *and* it's not a pre-release of the same
+        # version in the spec.
+        return True
+
+    def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
+        """``>`` operator, excluding post-releases and local versions of the
+        spec version itself."""
+
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a post-release version, we do not accept
+        # post-release versions for the version mentioned in the specifier
+        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # greater than the spec version *and* it's not a pre-release of the
+        # same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
+        """``===`` operator: case-insensitive exact string comparison."""
+        return str(prospective).lower() == str(spec).lower()
+
+    def __contains__(self, item: Union[str, Version]) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in Specifier(">=1.2.3")
+        True
+        >>> Version("1.2.3") in Specifier(">=1.2.3")
+        True
+        >>> "1.0.0" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
+        True
+        """
+        return self.contains(item)
+
+    def contains(
+        self, item: UnparsedVersion, prereleases: Optional[bool] = None
+    ) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this Specifier. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+
+        >>> Specifier(">=1.2.3").contains("1.2.3")
+        True
+        >>> Specifier(">=1.2.3").contains(Version("1.2.3"))
+        True
+        >>> Specifier(">=1.2.3").contains("1.0.0")
+        False
+        >>> Specifier(">=1.2.3").contains("1.3.0a1")
+        False
+        >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
+        True
+        >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
+        True
+        """
+
+        # Determine if prereleases are to be allowed or not.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # Normalize item to a Version, this allows us to have a shortcut for
+        # "2.0" in Specifier(">=2")
+        normalized_item = _coerce_version(item)
+
+        # Determine if we should be supporting prereleases in this specifier
+        # or not, if we do not support prereleases than we can short circuit
+        # logic if this version is a prereleases.
+        if normalized_item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        operator_callable: CallableOperator = self._get_operator(self.operator)
+        return operator_callable(normalized_item, self.version)
+
    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
    ) -> Iterator[UnparsedVersionVar]:
        """Filter items in the given iterable, that match the specifier.

        :param iterable:
            An iterable that can contain version strings and :class:`Version` instances.
            The items in the iterable will be filtered according to the specifier.
        :param prereleases:
            Whether or not to allow prereleases in the returned iterator. If set to
            ``None`` (the default), it will intelligently decide whether to allow
            prereleases or not (based on the :attr:`prereleases` attribute, and
            whether the only versions matching are prereleases).

        This method is smarter than just ``filter(Specifier().contains, [...])``
        because it implements the rule from :pep:`440` that a prerelease item
        SHOULD be accepted if no other versions match the given specifier.

        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
        ['1.3']
        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
        ['1.2.3', '1.3', <Version('1.4')>]
        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
        ['1.5a1']
        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']
        """

        yielded = False
        found_prereleases = []

        # Always let contains() see prereleases during the scan; whether a
        # prerelease is actually emitted is decided below, so that prereleases
        # can serve as a fallback when nothing else matches.
        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = _coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if parsed_version.is_prerelease and not (
                    prereleases or self.prereleases
                ):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version: str) -> List[str]:
+    result: List[str] = []
+    for item in version.split("."):
+        match = _prefix_regex.search(item)
+        if match:
+            result.extend(match.groups())
+        else:
+            result.append(item)
+    return result
+
+
+def _is_not_suffix(segment: str) -> bool:
+    return not any(
+        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+    )
+
+
+def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
+    left_split, right_split = [], []
+
+    # Get the release segment of our versions
+    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+    # Get the rest of our versions
+    left_split.append(left[len(left_split[0]) :])
+    right_split.append(right[len(right_split[0]) :])
+
+    # Insert our padding
+    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+
+    return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
+
+
class SpecifierSet(BaseSpecifier):
    """This class abstracts handling of a set of version specifiers.

    It can be passed a single specifier (``>=3.0``), a comma-separated list of
    specifiers (``>=3.0,!=3.1``), or no specifier at all.
    """

    def __init__(
        self, specifiers: str = "", prereleases: Optional[bool] = None
    ) -> None:
        """Initialize a SpecifierSet instance.

        :param specifiers:
            The string representation of a specifier or a comma-separated list of
            specifiers which will be parsed and normalized before use.
        :param prereleases:
            This tells the SpecifierSet if it should accept prerelease versions if
            applicable or not. The default of ``None`` will autodetect it from the
            given specifiers.

        :raises InvalidSpecifier:
            If the given ``specifiers`` are not parseable, then this exception
            will be raised.
        """

        # Split on `,` to break each individual specifier into its own item, and
        # strip each item to remove leading/trailing whitespace.
        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parse each individual specifier, attempting first to make it a
        # Specifier.
        parsed: Set[Specifier] = set()
        for specifier in split_specifiers:
            parsed.add(Specifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    @property
    def prereleases(self) -> Optional[bool]:
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __repr__(self) -> str:
        """A representation of the specifier set that shows all internal state.

        Note that the ordering of the individual specifiers within the set may not
        match the input string.

        >>> SpecifierSet('>=1.0.0,!=2.0.0')
        <SpecifierSet('!=2.0.0,>=1.0.0')>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
        """
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<SpecifierSet({str(self)!r}{pre})>"

    def __str__(self) -> str:
        """A string representation of the specifier set that can be round-tripped.

        Note that the ordering of the individual specifiers within the set may not
        match the input string.

        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
        '!=1.0.1,>=1.0.0'
        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
        '!=1.0.1,>=1.0.0'
        """
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self) -> int:
        # Hash only the specifiers, consistent with __eq__ ignoring the
        # prereleases override.
        return hash(self._specs)

    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
        """Return a SpecifierSet which is a combination of the two sets.

        :param other: The other object to combine with.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        """
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # Merge the prerelease overrides: inherit whichever side has one set,
        # allow equal values through, and reject conflicting True/False.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other: object) -> bool:
        """Whether or not the two SpecifierSet-like objects are equal.

        :param other: The other object to check against.

        The value of :attr:`prereleases` is ignored.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
        False
        """
        if isinstance(other, (str, Specifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __len__(self) -> int:
        """Returns the number of specifiers in this specifier set."""
        return len(self._specs)

    def __iter__(self) -> Iterator[Specifier]:
        """
        Returns an iterator over all the underlying :class:`Specifier` instances
        in this specifier set.

        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
        """
        return iter(self._specs)

    def __contains__(self, item: UnparsedVersion) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item: The item to check for.

        This is used for the ``in`` operator and behaves the same as
        :meth:`contains` with no ``prereleases`` argument passed.

        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
        False
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
        False
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
        True
        """
        return self.contains(item)

    def contains(
        self,
        item: UnparsedVersion,
        prereleases: Optional[bool] = None,
        installed: Optional[bool] = None,
    ) -> bool:
        """Return whether or not the item is contained in this SpecifierSet.

        :param item:
            The item to check for, which can be a version string or a
            :class:`Version` instance.
        :param prereleases:
            Whether or not to match prereleases with this SpecifierSet. If set to
            ``None`` (the default), it uses :attr:`prereleases` to determine
            whether or not prereleases are allowed.
        :param installed:
            If true, a prerelease ``item`` is compared using only its base
            version (its pre/dev segments are stripped before matching).

        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
        True
        """
        # Ensure that our item is a Version instance.
        if not isinstance(item, Version):
            item = Version(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        #       like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # When ``installed`` is true, compare a prerelease item by its base
        # version only.
        if installed and item.is_prerelease:
            item = Version(item.base_version)

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        #       will always return True, this is an explicit design decision.
        return all(s.contains(item, prereleases=prereleases) for s in self._specs)

    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
    ) -> Iterator[UnparsedVersionVar]:
        """Filter items in the given iterable, that match the specifiers in this set.

        :param iterable:
            An iterable that can contain version strings and :class:`Version` instances.
            The items in the iterable will be filtered according to the specifier.
        :param prereleases:
            Whether or not to allow prereleases in the returned iterator. If set to
            ``None`` (the default), it will intelligently decide whether to allow
            prereleases or not (based on the :attr:`prereleases` attribute, and
            whether the only versions matching are prereleases).

        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
        because it implements the rule from :pep:`440` that a prerelease item
        SHOULD be accepted if no other versions match the given specifier.

        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
        ['1.3', <Version('1.4')>]
        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
        []
        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']

        An "empty" SpecifierSet will filter items based on the presence of prerelease
        versions in the set.

        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet("").filter(["1.5a1"]))
        ['1.5a1']
        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']
        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        """
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iter(iterable)
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases.
        else:
            filtered: List[UnparsedVersionVar] = []
            found_prereleases: List[UnparsedVersionVar] = []

            for item in iterable:
                parsed_version = _coerce_version(item)

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return iter(found_prereleases)

            return iter(filtered)
diff --git a/src/wheel/vendored/packaging/tags.py b/src/wheel/vendored/packaging/tags.py
new file mode 100644 (file)
index 0000000..37f33b1
--- /dev/null
@@ -0,0 +1,553 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import logging
+import platform
+import struct
+import subprocess
+import sys
+import sysconfig
+from importlib.machinery import EXTENSION_SUFFIXES
+from typing import (
+    Dict,
+    FrozenSet,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    Sequence,
+    Tuple,
+    Union,
+    cast,
+)
+
+from . import _manylinux, _musllinux
+
logger = logging.getLogger(__name__)

# A Python version tuple such as ``(3, 11)``; may contain only the major part.
PythonVersion = Sequence[int]
# A macOS release expressed as a (major, minor) pair.
MacVersion = Tuple[int, int]

# Known interpreter implementations mapped to the short prefixes used in tags.
INTERPRETER_SHORT_NAMES: Dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


# True when the running interpreter uses 4-byte (32-bit) pointers.
_32_BIT_INTERPRETER = struct.calcsize("P") == 4
+
+
class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        # Tags compare case-insensitively, so normalize on construction.
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # Tags are hashed extremely often when matching candidate wheels
        # against a Set[Tag] (e.g. via its `.disjoint()` method), so the hash
        # is computed once up front instead of on every __hash__ call.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented

        # Cheapest check first: differing precomputed hashes prove inequality.
        if self._hash != other._hash:
            return False
        return (
            self._platform == other._platform
            and self._abi == other._abi
            and self._interpreter == other._interpreter
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return "-".join((self._interpreter, self._abi, self._platform))

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"
+
+
def parse_tag(tag: str) -> FrozenSet[Tag]:
    """
    Expand a (possibly compressed) tag string such as ``py3-none-any`` into a
    frozenset of :class:`Tag` instances.

    A set is returned because each dash-separated field may itself be a
    dot-separated list of alternatives (a "compressed tag set").
    """
    interpreters, abis, platforms = tag.split("-")
    return frozenset(
        Tag(interpreter, abi, platform_)
        for interpreter in interpreters.split(".")
        for abi in abis.split(".")
        for platform_ in platforms.split(".")
    )
+
+
def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
    """Fetch a sysconfig variable, optionally logging when it is missing."""
    value: Union[int, str, None] = sysconfig.get_config_var(name)
    if warn and value is None:
        logger.debug(
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
        )
    return value
+
+
+def _normalize_string(string: str) -> str:
+    return string.replace(".", "_").replace("-", "_").replace(" ", "_")
+
+
def _abi3_applies(python_version: PythonVersion) -> bool:
    """
    Determine whether the given CPython version can use the stable ABI (abi3).

    PEP 384 introduced the stable ABI in Python 3.2; a bare major version
    (e.g. ``(3,)``) is not specific enough to qualify.
    """
    if len(python_version) < 2:
        return False
    return tuple(python_version) >= (3, 2)
+
+
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
    """Return the ABI tags for a CPython interpreter of the given version.

    The first tag is always the fully qualified one including the build-flag
    suffixes (``d`` for debug, ``m`` for pymalloc on <3.8, ``u`` for UCS-4 on
    <3.3), e.g. ``cp37dm``. On 3.8+ a debug build additionally gets the plain
    ``cp<version>`` tag, since such builds can load normal extension modules.
    """
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}")
    # The fully qualified ABI tag always comes first.
    abis.insert(
        0,
        "cp{version}{debug}{pymalloc}{ucs4}".format(
            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
        ),
    )
    return abis
+
+
def cpython_tags(
    python_version: Optional[PythonVersion] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yield the wheel tags supported by a CPython interpreter.

    The produced order is:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<older minor>-abi3-<platform> for every older minor down to 3.2

    When ``python_version`` only has a major component, no ABIs are inferred:
    only caller-provided ABIs plus ``none`` are used. If the caller lists
    ``abi3`` or ``none`` explicitly, they are emitted at their standard
    positions above rather than at the front.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    # Without a minor version we cannot infer the interpreter's ABIs.
    if abis is None:
        abis = _cpython_abis(python_version, warn) if len(python_version) > 1 else []
    abis = list(abis)
    # "abi3" and "none" are emitted at fixed positions below, so drop the
    # first occurrence of each from the caller-supplied list.
    for reserved in ("abi3", "none"):
        if reserved in abis:
            abis.remove(reserved)

    platforms = list(platforms or platform_tags())

    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)

    use_abi3 = _abi3_applies(python_version)
    if use_abi3:
        for platform_ in platforms:
            yield Tag(interpreter, "abi3", platform_)
    for platform_ in platforms:
        yield Tag(interpreter, "none", platform_)

    # Wheels built against the stable ABI of an older CPython still load.
    if use_abi3:
        for minor_version in range(python_version[1] - 1, 1, -1):
            older_interpreter = (
                f"cp{_version_nodot((python_version[0], minor_version))}"
            )
            for platform_ in platforms:
                yield Tag(older_interpreter, "abi3", platform_)
+
+
def _generic_abi() -> List[str]:
    """
    Return the ABI tag based on EXT_SUFFIX.

    Falls back to :func:`_cpython_abis` for old Windows CPythons whose suffix
    is a bare ``.pyd``, and raises ``SystemError`` when ``EXT_SUFFIX`` is
    missing or does not start with a dot.
    """
    # The following are examples of `EXT_SUFFIX`.
    # We want to keep the parts which are related to the ABI and remove the
    # parts which are related to the platform:
    # - linux:   '.cpython-310-x86_64-linux-gnu.so' => cp310
    # - mac:     '.cpython-310-darwin.so'           => cp310
    # - win:     '.cp310-win_amd64.pyd'             => cp310
    # - win:     '.pyd'                             => cp37 (uses _cpython_abis())
    # - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
    #                                               => graalpy_38_native

    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
    parts = ext_suffix.split(".")
    if len(parts) < 3:
        # CPython3.7 and earlier uses ".pyd" on Windows.
        return _cpython_abis(sys.version_info[:2])
    # The SOABI component sits between the leading dot and the file extension.
    soabi = parts[1]
    if soabi.startswith("cpython"):
        # non-windows
        abi = "cp" + soabi.split("-")[1]
    elif soabi.startswith("cp"):
        # windows
        abi = soabi.split("-")[0]
    elif soabi.startswith("pypy"):
        abi = "-".join(soabi.split("-")[:2])
    elif soabi.startswith("graalpy"):
        abi = "-".join(soabi.split("-")[:3])
    elif soabi:
        # pyston, ironpython, others?
        abi = soabi
    else:
        return []
    return [_normalize_string(abi)]
+
+
def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yield the tags for an interpreter without specialized tag logic.

    Every produced tag has the shape ``<interpreter>-<abi>-<platform>``; the
    ``none`` ABI is appended automatically when the caller did not include it.
    """
    if not interpreter:
        # Derive e.g. "pp39" from the running interpreter.
        interpreter = interpreter_name() + interpreter_version(warn=warn)
    abis = _generic_abi() if abis is None else list(abis)
    platforms = list(platforms or platform_tags())
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
+
+
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yield generic ``py`` interpreter tags, most specific first.

    Order: the exact ``pyXY`` tag (when a minor version is known), then the
    bare major ``pyX``, then every older minor of that major descending to 0.
    """
    has_minor = len(py_version) > 1
    if has_minor:
        yield f"py{_version_nodot(py_version[:2])}"
    yield f"py{py_version[0]}"
    if has_minor:
        for minor in range(py_version[1] - 1, -1, -1):
            yield f"py{_version_nodot((py_version[0], minor))}"
+
+
def compatible_tags(
    python_version: Optional[PythonVersion] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
    """
    Yield the tags compatible with any implementation of a Python version.

    Order: ``py*-none-<platform>`` for each platform, then (only when an
    interpreter is given) ``<interpreter>-none-any``, then ``py*-none-any``.
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platforms = list(platforms or platform_tags())
    for version in _py_interpreter_range(python_version):
        yield from (Tag(version, "none", platform_) for platform_ in platforms)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    yield from (
        Tag(version, "none", "any")
        for version in _py_interpreter_range(python_version)
    )
+
+
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    """Return the effective CPU arch, narrowing it for 32-bit interpreters."""
    if not is_32bit:
        return arch
    # A 32-bit interpreter on 64-bit hardware must use the 32-bit arch name.
    return "ppc" if arch.startswith("ppc") else "i386"
+
+
def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
    """Return the binary-format tags usable for *cpu_arch* on macOS *version*,
    most specific first; empty when the arch is unsupported on that release.
    """
    formats = [cpu_arch]
    if cpu_arch == "x86_64":
        if version < (10, 4):
            return []
        formats += ["intel", "fat64", "fat32"]
    elif cpu_arch == "i386":
        if version < (10, 4):
            return []
        formats += ["intel", "fat32", "fat"]
    elif cpu_arch == "ppc64":
        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
        if not ((10, 4) <= version <= (10, 5)):
            return []
        formats.append("fat64")
    elif cpu_arch == "ppc":
        if version > (10, 6):
            return []
        formats += ["fat32", "fat"]

    if cpu_arch in {"arm64", "x86_64"}:
        formats.append("universal2")
    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
        formats.append("universal")

    return formats
+
+
def mac_platforms(
    version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
        if version == (10, 16):
            # When built against an older macOS SDK, Python will report macOS 10.16
            # instead of the real version.  Re-query in a subprocess with the
            # SYSTEM_VERSION_COMPAT shim disabled to learn the real version.
            version_str = subprocess.run(
                [
                    sys.executable,
                    "-sS",
                    "-c",
                    "import platform; print(platform.mac_ver()[0])",
                ],
                check=True,
                env={"SYSTEM_VERSION_COMPAT": "0"},
                stdout=subprocess.PIPE,
                text=True,
            ).stdout
            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    if arch is None:
        arch = _mac_arch(cpu_arch)

    if (10, 0) <= version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number.  The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=10, minor=minor_version, binary_format=binary_format
                )

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number.  The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=major_version, minor=0, binary_format=binary_format
                )

        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield "macosx_{major}_{minor}_{binary_format}".format(
                        major=compat_version[0],
                        minor=compat_version[1],
                        binary_format=binary_format,
                    )
        else:
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_format = "universal2"
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=compat_version[0],
                    minor=compat_version[1],
                    binary_format=binary_format,
                )
+
+
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """Yield manylinux/musllinux/linux platform tags for this system."""
    platform_str = _normalize_string(sysconfig.get_platform())
    if not platform_str.startswith("linux_"):
        # we should never be here; fall back to the raw sysconfig tag
        yield platform_str
        return
    if is_32bit:
        # A 32-bit interpreter on a 64-bit kernel reports the kernel arch;
        # substitute the matching 32-bit userland arch.
        replacements = {"linux_x86_64": "linux_i686", "linux_aarch64": "linux_armv8l"}
        platform_str = replacements.get(platform_str, platform_str)
    _, arch = platform_str.split("_", 1)
    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
    yield from _manylinux.platform_tags(archs)
    yield from _musllinux.platform_tags(archs)
    for compatible_arch in archs:
        yield f"linux_{compatible_arch}"
+
+
def _generic_platforms() -> Iterator[str]:
    # Fallback for systems other than macOS/Linux: yield the single
    # normalized sysconfig platform string.
    yield _normalize_string(sysconfig.get_platform())
+
+
def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    system = platform.system()
    if system == "Darwin":
        return mac_platforms()
    if system == "Linux":
        return _linux_platforms()
    return _generic_platforms()
+
+
def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.

    Some implementations have a reserved, two-letter abbreviation which will
    be returned when appropriate.
    """
    name = sys.implementation.name
    short_name = INTERPRETER_SHORT_NAMES.get(name)
    return short_name or name
+
+
def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter in "nodot" form (e.g. "311").
    """
    config_version = _get_config_var("py_version_nodot", warn=warn)
    if config_version:
        return str(config_version)
    return _version_nodot(sys.version_info[:2])
+
+
def _version_nodot(version: PythonVersion) -> str:
    """Join version components without separators, e.g. ``(3, 11)`` -> ``"311"``."""
    return "".join(str(part) for part in version)
+
+
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """
    interp_name = interpreter_name()

    # Implementation-specific tags come first.
    if interp_name == "cp":
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    # Then the generic compatible tags, qualified by interpreter when known.
    if interp_name == "pp":
        interp: Optional[str] = "pp3"
    elif interp_name == "cp":
        interp = "cp" + interpreter_version(warn=warn)
    else:
        interp = None
    yield from compatible_tags(interpreter=interp)
diff --git a/src/wheel/vendored/packaging/utils.py b/src/wheel/vendored/packaging/utils.py
new file mode 100644 (file)
index 0000000..c2c2f75
--- /dev/null
@@ -0,0 +1,172 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import re
+from typing import FrozenSet, NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version
+
# A wheel build tag: either the empty tuple or (build_number, build_suffix),
# as produced by parse_wheel_filename() per PEP 427.
BuildTag = Union[Tuple[()], Tuple[int, str]]
# Distinct type for names that have passed through canonicalize_name().
NormalizedName = NewType("NormalizedName", str)
+
+
# Raised by canonicalize_name(..., validate=True).
class InvalidName(ValueError):
    """
    An invalid distribution name; users should refer to the packaging user guide.
    """
+
+
# Raised by parse_wheel_filename().
class InvalidWheelFilename(ValueError):
    """
    An invalid wheel filename was found, users should refer to PEP 427.
    """
+
+
# Raised by parse_sdist_filename().
class InvalidSdistFilename(ValueError):
    """
    An invalid sdist filename was found, users should refer to the packaging user guide.
    """
+
+
# Core metadata spec for `Name`
_validate_regex = re.compile(
    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
# PEP 503: runs of "-", "_" and "." are equivalent to a single "-".
_canonicalize_regex = re.compile(r"[-_.]+")
# Fully normalized form: lowercase alphanumerics separated by single dashes.
_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
+
+
def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
    """
    Lower-case *name* and collapse runs of ``-``, ``_`` and ``.`` into a
    single dash (PEP 503).  With ``validate=True``, first require that
    *name* is a legal distribution name per the core metadata spec.
    """
    if validate and not _validate_regex.match(name):
        raise InvalidName(f"name is invalid: {name!r}")
    # This is taken from PEP 503.
    normalized = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, normalized)
+
+
def is_normalized_name(name: str) -> bool:
    """Return True if *name* is already in fully normalized (lowercase, single-dash) form."""
    return bool(_normalized_regex.match(name))
+
+
def canonicalize_version(
    version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
    """
    This is very similar to Version.__str__, but has one subtle difference
    with the way it handles the release segment.
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Legacy versions cannot be normalized
            return version
    else:
        parsed = version

    pieces = []

    # Epoch
    if parsed.epoch != 0:
        pieces.append(f"{parsed.epoch}!")

    # Release segment
    release = ".".join(str(part) for part in parsed.release)
    if strip_trailing_zero:
        # NB: This strips trailing '.0's to normalize
        release = re.sub(r"(\.0)+$", "", release)
    pieces.append(release)

    # Pre-release
    if parsed.pre is not None:
        pieces.append("".join(str(part) for part in parsed.pre))

    # Post-release
    if parsed.post is not None:
        pieces.append(f".post{parsed.post}")

    # Development release
    if parsed.dev is not None:
        pieces.append(f".dev{parsed.dev}")

    # Local version segment
    if parsed.local is not None:
        pieces.append(f"+{parsed.local}")

    return "".join(pieces)
+
+
def parse_wheel_filename(
    filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
    """
    Parse a PEP 427 wheel filename into (name, version, build tag, tags).

    :raises InvalidWheelFilename: When *filename* does not follow the spec.
    """
    # Bug fix: the error messages below previously contained the literal text
    # "(unknown)" inside placeholder-free f-strings; they now interpolate the
    # offending filename so the error is actionable.
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename}"
        )

    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename}"
        )

    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name.
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename}")
    name = canonicalize_name(name_part)

    try:
        version = Version(parts[1])
    except InvalidVersion as e:
        raise InvalidWheelFilename(
            f"Invalid wheel filename (invalid version): {filename}"
        ) from e

    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in '{filename}'"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
+
+
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
    """
    Parse an sdist filename into its (normalized name, version) pair.

    :raises InvalidSdistFilename: When the extension or structure is wrong.
    """
    # Bug fix: the error messages below previously contained the literal text
    # "(unknown)" inside placeholder-free f-strings; they now interpolate the
    # offending filename so the error is actionable.
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")

    name = canonicalize_name(name_part)

    try:
        version = Version(version_part)
    except InvalidVersion as e:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (invalid version): {filename}"
        ) from e

    return (name, version)
diff --git a/src/wheel/vendored/packaging/version.py b/src/wheel/vendored/packaging/version.py
new file mode 100644 (file)
index 0000000..5faab9b
--- /dev/null
@@ -0,0 +1,563 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from packaging.version import parse, Version
+"""
+
+import itertools
+import re
+from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
+
# A parsed local version segment: a tuple of ints and strings.
LocalType = Tuple[Union[int, str], ...]

# Comparison-key component types.  Infinity/NegativeInfinity stand in for
# absent pre/post/dev/local segments (see _cmpkey) so that plain tuple
# comparison yields PEP 440 ordering.
CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
CmpLocalType = Union[
    NegativeInfinityType,
    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
]
# (epoch, release, pre, post, dev, local) — the full sort key.
CmpKey = Tuple[
    int,
    Tuple[int, ...],
    CmpPrePostDevType,
    CmpPrePostDevType,
    CmpPrePostDevType,
    CmpLocalType,
]
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
+
+
class _Version(NamedTuple):
    """Raw parsed pieces of a version string, before _cmpkey() normalization."""

    epoch: int
    release: Tuple[int, ...]
    dev: Optional[Tuple[str, int]]
    pre: Optional[Tuple[str, int]]
    post: Optional[Tuple[str, int]]
    local: Optional[LocalType]
+
+
def parse(version: str) -> "Version":
    """Parse the given version string.

    >>> parse('1.0.dev1')
    <Version('1.0.dev1')>

    :param version: The version string to parse.
    :raises InvalidVersion: When the version string is not a valid version.
    """
    # Thin functional wrapper around the Version constructor.
    return Version(version)
+
+
# Inherits ValueError, so callers may catch either exception type.
class InvalidVersion(ValueError):
    """Raised when a version string is not a valid version.

    >>> Version("invalid")
    Traceback (most recent call last):
        ...
    packaging.version.InvalidVersion: Invalid version: 'invalid'
    """
+
+
class _BaseVersion:
    """Comparison mixin: hashing and total ordering driven by ``self._key``."""

    _key: Tuple[Any, ...]

    def __hash__(self) -> int:
        return hash(self._key)

    # The isinstance check is intentionally repeated in each method below;
    # routing it through a shared helper would add a function call to every
    # comparison.
    def __lt__(self, other: "_BaseVersion") -> bool:
        if isinstance(other, _BaseVersion):
            return self._key < other._key
        return NotImplemented

    def __le__(self, other: "_BaseVersion") -> bool:
        if isinstance(other, _BaseVersion):
            return self._key <= other._key
        return NotImplemented

    def __eq__(self, other: object) -> bool:
        if isinstance(other, _BaseVersion):
            return self._key == other._key
        return NotImplemented

    def __ge__(self, other: "_BaseVersion") -> bool:
        if isinstance(other, _BaseVersion):
            return self._key >= other._key
        return NotImplemented

    def __gt__(self, other: "_BaseVersion") -> bool:
        if isinstance(other, _BaseVersion):
            return self._key > other._key
        return NotImplemented

    def __ne__(self, other: object) -> bool:
        if isinstance(other, _BaseVersion):
            return self._key != other._key
        return NotImplemented
+
+
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
_VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

# Public, documented alias of the private pattern above (see docstring below).
VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.

The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.

:meta hide-value:
"""
+
+
class Version(_BaseVersion):
    """This class abstracts handling of a project's versions.

    A :class:`Version` instance is comparison aware and can be compared and
    sorted using the standard Python interfaces.

    >>> v1 = Version("1.0a5")
    >>> v2 = Version("1.0")
    >>> v1
    <Version('1.0a5')>
    >>> v2
    <Version('1.0')>
    >>> v1 < v2
    True
    >>> v1 == v2
    False
    >>> v1 > v2
    False
    >>> v1 >= v2
    False
    >>> v1 <= v2
    True
    """

    # Anchored form of VERSION_PATTERN; surrounding whitespace is tolerated.
    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
    # Sort key computed once in __init__; comparisons use it via _BaseVersion.
    _key: CmpKey

    def __init__(self, version: str) -> None:
        """Initialize a Version object.

        :param version:
            The string representation of a version which will be parsed and normalized
            before use.
        :raises InvalidVersion:
            If the ``version`` does not conform to PEP 440 in any way then this
            exception will be raised.
        """

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: '{version}'")

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        """A representation of the Version that shows all internal state.

        >>> Version('1.0.0')
        <Version('1.0.0')>
        """
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        """A string representation of the version that can be rounded-tripped.

        >>> str(Version("1.0a5"))
        '1.0a5'
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(f".post{self.post}")

        # Development release
        if self.dev is not None:
            parts.append(f".dev{self.dev}")

        # Local version segment
        if self.local is not None:
            parts.append(f"+{self.local}")

        return "".join(parts)

    @property
    def epoch(self) -> int:
        """The epoch of the version.

        >>> Version("2.0.0").epoch
        0
        >>> Version("1!2.0.0").epoch
        1
        """
        return self._version.epoch

    @property
    def release(self) -> Tuple[int, ...]:
        """The components of the "release" segment of the version.

        >>> Version("1.2.3").release
        (1, 2, 3)
        >>> Version("2.0.0").release
        (2, 0, 0)
        >>> Version("1!2.0.0.post0").release
        (2, 0, 0)

        Includes trailing zeroes but not the epoch or any pre-release / development /
        post-release suffixes.
        """
        return self._version.release

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        """The pre-release segment of the version.

        >>> print(Version("1.2.3").pre)
        None
        >>> Version("1.2.3a1").pre
        ('a', 1)
        >>> Version("1.2.3b1").pre
        ('b', 1)
        >>> Version("1.2.3rc1").pre
        ('rc', 1)
        """
        return self._version.pre

    @property
    def post(self) -> Optional[int]:
        """The post-release number of the version.

        >>> print(Version("1.2.3").post)
        None
        >>> Version("1.2.3.post1").post
        1
        """
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> Optional[int]:
        """The development number of the version.

        >>> print(Version("1.2.3").dev)
        None
        >>> Version("1.2.3.dev1").dev
        1
        """
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> Optional[str]:
        """The local version segment of the version.

        >>> print(Version("1.2.3").local)
        None
        >>> Version("1.2.3+abc").local
        'abc'
        """
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        """The public portion of the version.

        >>> Version("1.2.3").public
        '1.2.3'
        >>> Version("1.2.3+abc").public
        '1.2.3'
        >>> Version("1.2.3+abc.dev1").public
        '1.2.3'
        """
        return str(self).split("+", 1)[0]

    @property
    def base_version(self) -> str:
        """The "base version" of the version.

        >>> Version("1.2.3").base_version
        '1.2.3'
        >>> Version("1.2.3+abc").base_version
        '1.2.3'
        >>> Version("1!1.2.3+abc.dev1").base_version
        '1!1.2.3'

        The "base version" is the public version of the project without any pre or post
        release markers.
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self) -> bool:
        """Whether this version is a pre-release.

        >>> Version("1.2.3").is_prerelease
        False
        >>> Version("1.2.3a1").is_prerelease
        True
        >>> Version("1.2.3b1").is_prerelease
        True
        >>> Version("1.2.3rc1").is_prerelease
        True
        >>> Version("1.2.3dev1").is_prerelease
        True
        """
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        """Whether this version is a post-release.

        >>> Version("1.2.3").is_postrelease
        False
        >>> Version("1.2.3.post1").is_postrelease
        True
        """
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        """Whether this version is a development release.

        >>> Version("1.2.3").is_devrelease
        False
        >>> Version("1.2.3.dev1").is_devrelease
        True
        """
        return self.dev is not None

    @property
    def major(self) -> int:
        """The first item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").major
        1
        """
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        """The second item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").minor
        2
        >>> Version("1").minor
        0
        """
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        """The third item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").micro
        3
        >>> Version("1").micro
        0
        """
        return self.release[2] if len(self.release) >= 3 else 0
+
+
def _parse_letter_version(
    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
) -> Optional[Tuple[str, int]]:
    """
    Normalize one (letter, number) release fragment to canonical form.

    Returns ``(canonical_letter, int(number))``, or ``None`` when the
    fragment is entirely absent.
    """
    # Alternate spellings collapse to a single canonical letter.
    spellings = {
        "alpha": "a",
        "beta": "b",
        "c": "rc",
        "pre": "rc",
        "preview": "rc",
        "rev": "post",
        "r": "post",
    }

    if letter:
        # A fragment with no numeral carries an implicit 0 (e.g. "1.0a").
        if number is None:
            number = 0
        lowered = letter.lower()
        return spellings.get(lowered, lowered), int(number)

    if number:
        # A bare numeral uses the implicit post-release syntax (e.g. "1.0-1").
        return "post", int(number)

    return None
+
+
# Local version segments may be separated by ".", "_" or "-".
_local_version_separators = re.compile(r"[\._-]")


def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is None:
        return None
    segments = _local_version_separators.split(local)
    return tuple(int(seg) if seg.isdigit() else seg.lower() for seg in segments)
+
+
def _cmpkey(
    epoch: int,
    release: Tuple[int, ...],
    pre: Optional[Tuple[str, int]],
    post: Optional[Tuple[str, int]],
    dev: Optional[Tuple[str, int]],
    local: Optional[LocalType],
) -> CmpKey:
    """Build the tuple used by _BaseVersion comparisons (PEP 440 ordering)."""

    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: CmpPrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: CmpPrePostDevType = NegativeInfinity

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: CmpPrePostDevType = Infinity

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: CmpLocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
diff --git a/src/wheel/vendored/vendor.txt b/src/wheel/vendored/vendor.txt
new file mode 100644 (file)
index 0000000..7feeca3
--- /dev/null
@@ -0,0 +1 @@
+packaging==23.2
diff --git a/src/wheel/wheelfile.py b/src/wheel/wheelfile.py
new file mode 100644 (file)
index 0000000..0cefbdb
--- /dev/null
@@ -0,0 +1,196 @@
+from __future__ import annotations
+
+import csv
+import hashlib
+import os.path
+import re
+import stat
+import time
+from io import StringIO, TextIOWrapper
+from zipfile import ZIP_DEFLATED, ZipFile, ZipInfo
+
+from wheel.cli import WheelError
+from wheel.util import log, urlsafe_b64decode, urlsafe_b64encode
+
# Non-greedy matching of an optional build number may be too clever (more
# invalid wheel filenames will match). Separate regex for .dist-info?
WHEEL_INFO_RE = re.compile(
    r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]+?))(-(?P<build>\d[^\s-]*))?
     -(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>\S+)\.whl$""",
    re.VERBOSE,
)
MINIMUM_TIMESTAMP = 315532800  # 1980-01-01 00:00:00 UTC


def get_zipinfo_datetime(timestamp=None):
    """Return a (year, month, day, hour, minute, second) tuple in UTC
    suitable for ``ZipInfo.date_time``.

    The ``SOURCE_DATE_EPOCH`` environment variable, when set, overrides
    *timestamp* so that reproducible .whl files can be built (see issue
    #143).  The result is clamped to the earliest timestamp the zip
    format can represent (1980-01-01 00:00:00 UTC).
    """
    env_epoch = os.environ.get("SOURCE_DATE_EPOCH")
    if env_epoch is not None:
        stamp = int(env_epoch)
    else:
        # A missing (or zero) timestamp falls back to the current time.
        stamp = int(timestamp or time.time())

    if stamp < MINIMUM_TIMESTAMP:
        stamp = MINIMUM_TIMESTAMP

    return time.gmtime(stamp)[:6]
+
+
class WheelFile(ZipFile):
    """A ZipFile derivative class that also reads SHA-256 hashes from
    .dist-info/RECORD and checks any read files against those.
    """

    # Hash algorithm used for entries added in write mode.
    _default_algorithm = hashlib.sha256

    def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
        """Open a wheel archive.

        :param file: path whose basename must be a valid wheel filename
            (validated with WHEEL_INFO_RE)
        :param mode: "r" verifies member hashes against RECORD; "w"
            collects hashes so RECORD can be written on close()
        :param compression: zipfile compression constant for new entries
        :raises WheelError: on a bad wheel filename or, in read mode, a
            missing RECORD or a disallowed hash algorithm in RECORD
        """
        basename = os.path.basename(file)
        self.parsed_filename = WHEEL_INFO_RE.match(basename)
        if not basename.endswith(".whl") or self.parsed_filename is None:
            raise WheelError(f"Bad wheel filename {basename!r}")

        ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)

        self.dist_info_path = "{}.dist-info".format(
            self.parsed_filename.group("namever")
        )
        self.record_path = self.dist_info_path + "/RECORD"
        # arcname -> (algorithm name, digest).  NOTE: in read mode the digest
        # is raw bytes decoded from RECORD; in write mode (see writestr) it is
        # a base64-encoded str — the two maps are never mixed.
        self._file_hashes = {}
        self._file_sizes = {}
        if mode == "r":
            # Ignore RECORD and any embedded wheel signatures
            self._file_hashes[self.record_path] = None, None
            self._file_hashes[self.record_path + ".jws"] = None, None
            self._file_hashes[self.record_path + ".p7s"] = None, None

            # Fill in the expected hashes by reading them from RECORD
            try:
                record = self.open(self.record_path)
            except KeyError:
                raise WheelError(f"Missing {self.record_path} file") from None

            with record:
                for line in csv.reader(
                    TextIOWrapper(record, newline="", encoding="utf-8")
                ):
                    path, hash_sum, size = line
                    # Lines without a hash (e.g. RECORD itself) are skipped.
                    if not hash_sum:
                        continue

                    # RECORD stores hashes as "<algorithm>=<urlsafe-b64>".
                    algorithm, hash_sum = hash_sum.split("=")
                    try:
                        hashlib.new(algorithm)
                    except ValueError:
                        raise WheelError(
                            f"Unsupported hash algorithm: {algorithm}"
                        ) from None

                    if algorithm.lower() in {"md5", "sha1"}:
                        raise WheelError(
                            "Weak hash algorithm ({}) is not permitted by PEP "
                            "427".format(algorithm)
                        )

                    self._file_hashes[path] = (
                        algorithm,
                        urlsafe_b64decode(hash_sum.encode("ascii")),
                    )

    def open(self, name_or_info, mode="r", pwd=None):
        """Open an archive member, verifying its hash as it is read.

        In read mode, refuses members that have no RECORD entry and raises
        WheelError on a digest mismatch once the member has been fully read.
        """

        def _update_crc(newdata):
            # Wrapper around ZipExtFile._update_crc: feeds the running hash
            # with exactly the bytes the CRC check sees; the digest
            # comparison only fires once EOF has been reached.
            eof = ef._eof
            update_crc_orig(newdata)
            running_hash.update(newdata)
            if eof and running_hash.digest() != expected_hash:
                raise WheelError(f"Hash mismatch for file '{ef_name}'")

        ef_name = (
            name_or_info.filename if isinstance(name_or_info, ZipInfo) else name_or_info
        )
        # Directories (trailing "/") have no RECORD entries and are exempt.
        if (
            mode == "r"
            and not ef_name.endswith("/")
            and ef_name not in self._file_hashes
        ):
            raise WheelError(f"No hash found for file '{ef_name}'")

        ef = ZipFile.open(self, name_or_info, mode, pwd)
        if mode == "r" and not ef_name.endswith("/"):
            algorithm, expected_hash = self._file_hashes[ef_name]
            # expected_hash is None for RECORD and signature files (ignored).
            if expected_hash is not None:
                # Monkey patch the _update_crc method to also check for the hash from
                # RECORD
                running_hash = hashlib.new(algorithm)
                update_crc_orig, ef._update_crc = ef._update_crc, _update_crc

        return ef

    def write_files(self, base_dir):
        """Recursively add the contents of *base_dir* to the archive.

        .dist-info files are deferred to the end of the archive and RECORD
        itself is skipped (it is regenerated by close()).  Directories and
        filenames are sorted so the archive layout is deterministic.
        """
        log.info(f"creating '{self.filename}' and adding '{base_dir}' to it")
        deferred = []
        for root, dirnames, filenames in os.walk(base_dir):
            # Sort the directory names so that `os.walk` will walk them in a
            # defined order on the next iteration.
            dirnames.sort()
            for name in sorted(filenames):
                path = os.path.normpath(os.path.join(root, name))
                if os.path.isfile(path):
                    arcname = os.path.relpath(path, base_dir).replace(os.path.sep, "/")
                    if arcname == self.record_path:
                        pass
                    elif root.endswith(".dist-info"):
                        deferred.append((path, arcname))
                    else:
                        self.write(path, arcname)

        deferred.sort()
        for path, arcname in deferred:
            self.write(path, arcname)

    def write(self, filename, arcname=None, compress_type=None):
        """Add the file *filename* under *arcname*, preserving its mode bits
        and mtime in the member's metadata."""
        with open(filename, "rb") as f:
            st = os.fstat(f.fileno())
            data = f.read()

        zinfo = ZipInfo(
            arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime)
        )
        # Store the POSIX permission and file-type bits in the high word of
        # external_attr so unpacking can restore them.
        zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
        zinfo.compress_type = compress_type or self.compression
        self.writestr(zinfo, data, compress_type)

    def writestr(self, zinfo_or_arcname, data, compress_type=None):
        """Add *data* to the archive and record its hash and size for the
        RECORD file written by close()."""
        if isinstance(zinfo_or_arcname, str):
            zinfo_or_arcname = ZipInfo(
                zinfo_or_arcname, date_time=get_zipinfo_datetime()
            )
            zinfo_or_arcname.compress_type = self.compression
            # Regular file, rw-rw-r-- by default.
            zinfo_or_arcname.external_attr = (0o664 | stat.S_IFREG) << 16

        if isinstance(data, str):
            data = data.encode("utf-8")

        ZipFile.writestr(self, zinfo_or_arcname, data, compress_type)
        fname = (
            zinfo_or_arcname.filename
            if isinstance(zinfo_or_arcname, ZipInfo)
            else zinfo_or_arcname
        )
        log.info(f"adding '{fname}'")
        # RECORD itself must not appear in RECORD.
        if fname != self.record_path:
            hash_ = self._default_algorithm(data)
            self._file_hashes[fname] = (
                hash_.name,
                urlsafe_b64encode(hash_.digest()).decode("ascii"),
            )
            self._file_sizes[fname] = len(data)

    def close(self):
        """Write the RECORD file (in write mode) and close the archive."""
        # Write RECORD
        if self.fp is not None and self.mode == "w" and self._file_hashes:
            data = StringIO()
            writer = csv.writer(data, delimiter=",", quotechar='"', lineterminator="\n")
            writer.writerows(
                (
                    (fname, algorithm + "=" + hash_, self._file_sizes[fname])
                    for fname, (algorithm, hash_) in self._file_hashes.items()
                )
            )
            # RECORD's own entry carries no hash or size.
            writer.writerow((format(self.record_path), "", ""))
            self.writestr(self.record_path, data.getvalue())

        ZipFile.close(self)
diff --git a/tests/cli/eggnames.txt b/tests/cli/eggnames.txt
new file mode 100644 (file)
index 0000000..d422120
--- /dev/null
@@ -0,0 +1,87 @@
+vcard-0.7.8-py2.7.egg
+qtalchemy-0.7.1-py2.7.egg
+AMQPDeliver-0.1-py2.7.egg
+infi.registry-0.1.1-py2.7.egg
+infi.instruct-0.5.5-py2.7.egg
+infi.devicemanager-0.1.2-py2.7.egg
+TracTixSummary-1.0-py2.7.egg
+ToscaWidgets-0.9.12-py2.7.egg
+archipel_agent_iphone_notification-0.5.0beta-py2.7.egg
+archipel_agent_action_scheduler-0.5.0beta-py2.7.egg
+ao.social-1.0.2-py2.7.egg
+apgl-0.7-py2.7.egg
+satchmo_payment_payworld-0.1.1-py2.7.egg
+snmpsim-0.1.3-py2.7.egg
+sshim-0.2-py2.7.egg
+shove-0.3.4-py2.7.egg
+simpleavro-0.3.0-py2.7.egg
+wkhtmltopdf-0.2-py2.7.egg
+wokkel-0.7.0-py2.7.egg
+jmbo_social-0.0.6-py2.7.egg
+jmbo_post-0.0.6-py2.7.egg
+jcrack-0.0.2-py2.7.egg
+riak-1.4.0-py2.7.egg
+restclient-0.10.2-py2.7.egg
+Sutekh-0.8.1-py2.7.egg
+trayify-0.0.1-py2.7.egg
+tweepy-1.9-py2.7.egg
+topzootools-0.2.1-py2.7.egg
+haystack-0.16-py2.7.egg
+zope.interface-4.0.1-py2.7-win32.egg
+neuroshare-0.8.5-py2.7-macosx-10.7-intel.egg
+ndg_httpsclient-0.2.0-py2.7.egg
+libtele-0.3-py2.7.egg
+litex.cxpool-1.0.2-py2.7.egg
+obspy.iris-0.5.1-py2.7.egg
+obspy.mseed-0.6.1-py2.7-win32.egg
+obspy.core-0.6.2-py2.7.egg
+CorePost-0.0.3-py2.7.egg
+fnordstalk-0.0.3-py2.7.egg
+Persistence-2.13.2-py2.7-win32.egg
+Pydap-3.1.RC1-py2.7.egg
+PyExecJS-1.0.4-py2.7.egg
+Wally-0.7.2-py2.7.egg
+ExtensionClass-4.0a1-py2.7-win32.egg
+Feedjack-0.9.16-py2.7.egg
+Mars24-0.3.9-py2.7.egg
+HalWeb-0.6.0-py2.7.egg
+DARE-0.7.140-py2.7.egg
+macholib-1.3-py2.7.egg
+marrow.wsgi.egress.compression-1.1-py2.7.egg
+mcs-0.3.7-py2.7.egg
+Kook-0.6.0-py2.7.egg
+er-0.1-py2.7.egg
+evasion_director-1.1.4-py2.7.egg
+djquery-0.1a-py2.7.egg
+django_factory-0.7-py2.7.egg
+django_gizmo-0.0.3-py2.7.egg
+django_category-0.1-py2.7.egg
+dbwrap-0.3.2-py2.7.egg
+django_supergeneric-1.0-py2.7.egg
+django_dynamo-0.25-py2.7.egg
+django_acollabauth-0.1-py2.7.egg
+django_qrlink-0.1.0-py2.7.egg
+django_addons-0.6.6-py2.7.egg
+cover_grabber-1.1.2-py2.7.egg
+chem-1.1-py2.7.egg
+crud-0.1-py2.7.egg
+bongo-0.1-py2.7.egg
+bytecodehacks-April2000-py2.7.egg
+greenlet-0.3.4-py2.7-win32.egg
+ginvoke-0.3.1-py2.7.egg
+pyobjc_framework_ScriptingBridge-2.3-py2.7.egg
+pecan-0.2.0a-py2.7.egg
+pyress-0.2.0-py2.7.egg
+pyobjc_framework_PubSub-2.3-py2.7.egg
+pyobjc_framework_ExceptionHandling-2.3-py2.7.egg
+pywps-trunk-py2.7.egg
+pyobjc_framework_CFNetwork-2.3-py2.7-macosx-10.6-fat.egg
+py.saunter-0.40-py2.7.egg
+pyfnordmetric-0.0.1-py2.7.egg
+pyws-1.1.1-py2.7.egg
+prestapyt-0.4.0-py2.7.egg
+passlib-1.5.3-py2.7.egg
+pyga-2.1-py2.7.egg
+pygithub3-0.3-py2.7.egg
+pyobjc_framework_OpenDirectory-2.3-py2.7.egg
+yaposib-0.2.75-py2.7-linux-x86_64.egg
diff --git a/tests/cli/test_convert.py b/tests/cli/test_convert.py
new file mode 100644 (file)
index 0000000..4f26b23
--- /dev/null
@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+import os.path
+import re
+
+from wheel.cli.convert import convert, egg_info_re
+from wheel.wheelfile import WHEEL_INFO_RE
+
+
def test_egg_re():
    """Every known egg filename must be matched by egg_info_re."""
    names_file = os.path.join(os.path.dirname(__file__), "eggnames.txt")
    with open(names_file, encoding="utf-8") as fh:
        names = [entry.strip() for entry in fh]

    for name in names:
        if name:
            assert egg_info_re.match(name), name
+
+
def test_convert_egg(egg_paths, tmp_path):
    """Converting eggs yields exactly one well-formed wheel per egg."""
    convert(egg_paths, str(tmp_path), verbose=False)
    produced = [entry.name for entry in tmp_path.iterdir()]
    assert len(produced) == len(egg_paths)
    name_pattern = re.compile(r"^[\w\d.]+-\d\.\d-\w+\d+-[\w\d]+-[\w\d]+\.whl$")
    for fname in produced:
        assert WHEEL_INFO_RE.match(fname), fname
        assert name_pattern.match(fname), fname
diff --git a/tests/cli/test_pack.py b/tests/cli/test_pack.py
new file mode 100644 (file)
index 0000000..31f28de
--- /dev/null
@@ -0,0 +1,81 @@
+from __future__ import annotations
+
+import email.policy
+import os
+from email.message import Message
+from email.parser import BytesParser
+from zipfile import ZipFile
+
+import pytest
+
+from wheel.cli.pack import pack
+
# Pre-built wheel checked into tests/testdata, used as the pack fixture.
THISDIR = os.path.dirname(__file__)
TESTWHEEL_NAME = "test-1.0-py2.py3-none-any.whl"
TESTWHEEL_PATH = os.path.join(THISDIR, "..", "testdata", TESTWHEEL_NAME)
+
+
@pytest.mark.filterwarnings("error:Duplicate name")
@pytest.mark.parametrize(
    "build_tag_arg, existing_build_tag, filename",
    [
        (None, None, "test-1.0-py2.py3-none-any.whl"),
        ("2b", None, "test-1.0-2b-py2.py3-none-any.whl"),
        (None, "3", "test-1.0-3-py2.py3-none-any.whl"),
        ("", "3", "test-1.0-py2.py3-none-any.whl"),
    ],
    ids=["nobuildnum", "newbuildarg", "oldbuildnum", "erasebuildnum"],
)
def test_pack(tmp_path_factory, tmp_path, build_tag_arg, existing_build_tag, filename):
    """Unpack the test wheel, optionally tweak its build tag, and repack it.

    Verifies the repacked wheel gets the expected filename, that RECORD is
    reproduced (apart from the WHEEL entry, which pack may rewrite), and
    that the WHEEL metadata carries the expected build number.
    """
    unpack_dir = tmp_path_factory.mktemp("wheeldir")
    with ZipFile(TESTWHEEL_PATH) as zf:
        # WHEEL's RECORD line is excluded from the comparison because pack
        # may rewrite WHEEL (build tag / line endings), changing its hash.
        old_record = zf.read("test-1.0.dist-info/RECORD")
        old_record_lines = sorted(
            line.rstrip()
            for line in old_record.split(b"\n")
            if line and not line.startswith(b"test-1.0.dist-info/WHEEL,")
        )
        zf.extractall(str(unpack_dir))

    if existing_build_tag:
        # Add the build number to WHEEL
        wheel_file_path = unpack_dir.joinpath("test-1.0.dist-info").joinpath("WHEEL")
        wheel_file_content = wheel_file_path.read_bytes()
        assert b"Build" not in wheel_file_content
        wheel_file_content += b"Build: 3\r\n"
        wheel_file_path.write_bytes(wheel_file_content)

    pack(str(unpack_dir), str(tmp_path), build_tag_arg)
    new_wheel_path = tmp_path.joinpath(filename)
    assert new_wheel_path.is_file()

    with ZipFile(str(new_wheel_path)) as zf:
        new_record = zf.read("test-1.0.dist-info/RECORD")
        new_record_lines = sorted(
            line.rstrip()
            for line in new_record.split(b"\n")
            if line and not line.startswith(b"test-1.0.dist-info/WHEEL,")
        )

        parser = BytesParser(policy=email.policy.compat32)
        new_wheel_file_content = parser.parsebytes(zf.read("test-1.0.dist-info/WHEEL"))

    assert new_record_lines == old_record_lines

    # Line endings and trailing blank line will depend on whether WHEEL
    # was modified.  Circumvent this by comparing parsed key/value pairs.
    expected_wheel_content = Message()
    expected_wheel_content["Wheel-Version"] = "1.0"
    expected_wheel_content["Generator"] = "bdist_wheel (0.30.0)"
    expected_wheel_content["Root-Is-Purelib"] = "false"
    expected_wheel_content["Tag"] = "py2-none-any"
    expected_wheel_content["Tag"] = "py3-none-any"
    expected_build_num = (
        build_tag_arg if build_tag_arg is not None else existing_build_tag
    )
    if expected_build_num:
        expected_wheel_content["Build"] = expected_build_num

    assert sorted(new_wheel_file_content.items()) == sorted(
        expected_wheel_content.items()
    )
diff --git a/tests/cli/test_tags.py b/tests/cli/test_tags.py
new file mode 100644 (file)
index 0000000..4d4dfa1
--- /dev/null
@@ -0,0 +1,240 @@
+from __future__ import annotations
+
+import shutil
+import sys
+from pathlib import Path
+from zipfile import ZipFile
+
+import pytest
+
+from wheel.cli import main, parser
+from wheel.cli.tags import tags
+from wheel.wheelfile import WheelFile
+
# Pre-built wheel in tests/testdata used as the retagging fixture.
TESTDIR = Path(__file__).parent.parent
TESTWHEEL_NAME = "test-1.0-py2.py3-none-any.whl"
TESTWHEEL_PATH = TESTDIR / "testdata" / TESTWHEEL_NAME
+
+
@pytest.fixture
def wheelpath(tmp_path):
    """Copy the test wheel into a fresh per-test directory and return its path."""
    target_dir = tmp_path / "wheels"
    target_dir.mkdir()
    destination = target_dir / TESTWHEEL_NAME
    shutil.copy(str(TESTWHEEL_PATH), str(destination))
    return destination
+
+
def test_tags_no_args(wheelpath):
    """With no tag arguments the name is unchanged and the wheel is kept."""
    newname = tags(str(wheelpath))
    assert newname == TESTWHEEL_NAME
    assert wheelpath.exists()
+
+
def test_python_tags(wheelpath):
    """Python tags can be replaced, left unchanged, or appended to."""
    newname = tags(str(wheelpath), python_tags="py3")
    assert newname == TESTWHEEL_NAME.replace("py2.py3", "py3")
    output_file = wheelpath.parent / newname
    with WheelFile(str(output_file)) as f:
        output = f.read(f.dist_info_path + "/WHEEL")
    expected = (
        b"Wheel-Version: 1.0\nGenerator: bdist_wheel (0.30.0)"
        b"\nRoot-Is-Purelib: false\nTag: py3-none-any\n\n"
    )
    assert output == expected
    output_file.unlink()

    # Identical tag set: the filename must come back unchanged.
    newname = tags(str(wheelpath), python_tags="py2.py3")
    assert newname == TESTWHEEL_NAME

    # Appending a tag with remove=True deletes the source wheel.
    newname = tags(str(wheelpath), python_tags="+py4", remove=True)
    assert not wheelpath.exists()
    assert newname == TESTWHEEL_NAME.replace("py2.py3", "py2.py3.py4")
    (wheelpath.parent / newname).unlink()
+
+
def test_abi_tags(wheelpath):
    """ABI tags are replaced, sorted, kept, or extended as requested."""
    newname = tags(str(wheelpath), abi_tags="cp33m")
    assert newname == TESTWHEEL_NAME.replace("none", "cp33m")
    (wheelpath.parent / newname).unlink()

    # Multiple tags are normalized into sorted order.
    newname = tags(str(wheelpath), abi_tags="cp33m.abi3")
    assert newname == TESTWHEEL_NAME.replace("none", "abi3.cp33m")
    (wheelpath.parent / newname).unlink()

    # Identical tag: the filename must come back unchanged.
    newname = tags(str(wheelpath), abi_tags="none")
    assert newname == TESTWHEEL_NAME

    # Appending with remove=True deletes the source wheel.
    newname = tags(str(wheelpath), abi_tags="+abi3.cp33m", remove=True)
    assert not wheelpath.exists()
    assert newname == TESTWHEEL_NAME.replace("none", "abi3.cp33m.none")
    (wheelpath.parent / newname).unlink()
+
+
def test_plat_tags(wheelpath):
    """Platform tags can be replaced, combined, appended and removed."""
    # Replace / combine / append: each produces a new wheel next to the source.
    for platform_arg, expected_tags in [
        ("linux_x86_64", "linux_x86_64"),
        ("linux_x86_64.win32", "linux_x86_64.win32"),
        ("+linux_x86_64.win32", "any.linux_x86_64.win32"),
    ]:
        newname = tags(str(wheelpath), platform_tags=platform_arg)
        assert newname == TESTWHEEL_NAME.replace("any", expected_tags)
        output_file = wheelpath.parent / newname
        assert output_file.exists()
        output_file.unlink()

    # Append again and keep the result so a tag can be removed from it.
    newname = tags(str(wheelpath), platform_tags="+linux_x86_64.win32")
    assert newname == TESTWHEEL_NAME.replace("any", "any.linux_x86_64.win32")
    output_file = wheelpath.parent / newname
    assert output_file.exists()

    newname2 = tags(str(output_file), platform_tags="-any")
    output_file.unlink()

    assert newname2 == TESTWHEEL_NAME.replace("any", "linux_x86_64.win32")
    output_file2 = wheelpath.parent / newname2
    assert output_file2.exists()
    output_file2.unlink()

    # Identical tag: the filename must come back unchanged.
    newname = tags(str(wheelpath), platform_tags="any")
    assert newname == TESTWHEEL_NAME
+
+
def test_build_tag(wheelpath):
    """A build tag can be inserted into, and cleared from, the filename."""
    newname = tags(str(wheelpath), build_tag="1bah")
    assert newname == TESTWHEEL_NAME.replace("-py2", "-1bah-py2")
    output_file = wheelpath.parent / newname
    assert output_file.exists()

    # An empty build tag leaves the original name untouched.
    newname = tags(str(wheelpath), build_tag="")
    assert newname == TESTWHEEL_NAME
    output_file.unlink()
+
+
@pytest.mark.parametrize(
    "build_tag, error",
    [
        pytest.param("foo", "build tag must begin with a digit", id="digitstart"),
        pytest.param("1-f", "invalid character ('-') in build tag", id="hyphen"),
    ],
)
def test_invalid_build_tag(wheelpath, build_tag, error, monkeypatch, capsys):
    """An invalid --build argument makes argparse exit with status 2."""
    argv = [sys.argv[0], "tags", "--build", build_tag]
    monkeypatch.setattr(sys, "argv", argv)
    with pytest.raises(SystemExit) as exc:
        main()

    _, stderr = capsys.readouterr()
    assert exc.value.args[0] == 2
    assert f"error: argument --build: {error}" in stderr
+
+
def test_multi_tags(wheelpath):
    """Several tag categories can be changed in one invocation."""
    newname = tags(
        str(wheelpath),
        platform_tags="linux_x86_64",
        python_tags="+py4",
        build_tag="1",
    )
    assert newname == "test-1.0-1-py2.py3.py4-none-linux_x86_64.whl"

    output_file = wheelpath.parent / newname
    assert output_file.exists()
    with WheelFile(str(output_file)) as f:
        output = f.read(f.dist_info_path + "/WHEEL")

    expected = (
        b"Wheel-Version: 1.0\nGenerator: bdist_wheel (0.30.0)\nRoot-Is-Purelib:"
        b" false\nTag: py2-none-linux_x86_64\nTag: py3-none-linux_x86_64\nTag:"
        b" py4-none-linux_x86_64\nBuild: 1\n\n"
    )
    assert output == expected
    output_file.unlink()
+
+
def test_tags_command(capsys, wheelpath):
    """The 'tags' subcommand prints the new name and keeps the original."""
    argv = [
        "tags",
        "--python-tag",
        "py3",
        "--abi-tag",
        "cp33m",
        "--platform-tag",
        "linux_x86_64",
        "--build",
        "7",
        str(wheelpath),
    ]
    parsed = parser().parse_args(argv)
    parsed.func(parsed)
    assert wheelpath.exists()

    newname = capsys.readouterr().out.strip()
    assert newname == "test-1.0-7-py3-cp33m-linux_x86_64.whl"
    (wheelpath.parent / newname).unlink()
+
+
def test_tags_command_del(capsys, wheelpath):
    """--remove deletes the original wheel after retagging."""
    argv = [
        "tags",
        "--python-tag",
        "+py4",
        "--abi-tag",
        "cp33m",
        "--platform-tag",
        "linux_x86_64",
        "--remove",
        str(wheelpath),
    ]
    parsed = parser().parse_args(argv)
    parsed.func(parsed)
    assert not wheelpath.exists()

    newname = capsys.readouterr().out.strip()
    assert newname == "test-1.0-py2.py3.py4-cp33m-linux_x86_64.whl"
    (wheelpath.parent / newname).unlink()
+
+
def test_permission_bits(capsys, wheelpath):
    """Retagging must preserve each member's external attributes (mode bits)."""
    argv = [
        "tags",
        "--python-tag=+py4",
        str(wheelpath),
    ]
    parsed = parser().parse_args(argv)
    parsed.func(parsed)

    newname = capsys.readouterr().out.strip()
    assert newname == "test-1.0-py2.py3.py4-none-any.whl"
    output_file = wheelpath.parent / newname

    with ZipFile(str(output_file), "r") as outf, ZipFile(str(wheelpath), "r") as inf:
        for member in inf.namelist():
            member_info = inf.getinfo(member)
            # Directories carry no meaningful mode; RECORD is regenerated.
            if member_info.is_dir() or member_info.filename.endswith("/RECORD"):
                continue

            out_attr = outf.getinfo(member).external_attr
            inf_attr = member_info.external_attr
            assert (
                out_attr == inf_attr
            ), f"{member} 0x{out_attr:012o} != 0x{inf_attr:012o}"

    output_file.unlink()
diff --git a/tests/cli/test_unpack.py b/tests/cli/test_unpack.py
new file mode 100644 (file)
index 0000000..ae584af
--- /dev/null
@@ -0,0 +1,36 @@
+from __future__ import annotations
+
+import platform
+import stat
+
+import pytest
+
+from wheel.cli.unpack import unpack
+from wheel.wheelfile import WheelFile
+
+
def test_unpack(wheel_paths, tmp_path):
    """Unpack every built test wheel.

    WheelFile verifies member hashes while reading, so this also checks
    the integrity of the test wheels themselves.
    """
    for path in wheel_paths:
        unpack(path, str(tmp_path))
+
+
@pytest.mark.skipif(
    platform.system() == "Windows", reason="Windows does not support the executable bit"
)
def test_unpack_executable_bit(tmp_path):
    """Unpacking restores the executable permission recorded in the wheel."""
    wheel_path = tmp_path / "test-1.0-py3-none-any.whl"
    source_script = tmp_path / "script"
    source_script.write_bytes(b"test script")
    source_script.chmod(0o755)
    with WheelFile(wheel_path, "w") as wf:
        wf.write(str(source_script), "nested/script")

    source_script.unlink()
    unpacked_script = tmp_path / "test-1.0" / "nested" / "script"
    unpack(str(wheel_path), str(tmp_path))
    assert not unpacked_script.is_dir()
    assert stat.S_IMODE(unpacked_script.stat().st_mode) == 0o755
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644 (file)
index 0000000..5ee84da
--- /dev/null
@@ -0,0 +1,65 @@
+"""
+pytest local configuration plug-in
+"""
+
+from __future__ import annotations
+
+import os.path
+import subprocess
+import sys
+
+import pytest
+
+
@pytest.fixture(scope="session")
def wheels_and_eggs(tmp_path_factory):
    """Build wheels and eggs from test distributions.

    Returns a sorted list of paths (as strings) of every .whl and .egg
    produced into a session-scoped temporary directory.
    """
    test_distributions = (
        "complex-dist",
        "simple.dist",
        "headers.dist",
        "commasinfilenames.dist",
        "unicode.dist",
    )

    if sys.platform != "win32":
        # ABI3 extensions don't really work on Windows
        test_distributions += ("abi3extension.dist",)

    pwd = os.path.abspath(os.curdir)
    this_dir = os.path.dirname(__file__)
    build_dir = tmp_path_factory.mktemp("build")
    dist_dir = tmp_path_factory.mktemp("dist")
    try:
        for dist in test_distributions:
            os.chdir(os.path.join(this_dir, "testdata", dist))
            subprocess.check_call(
                [
                    sys.executable,
                    "setup.py",
                    "bdist_egg",
                    "-b",
                    str(build_dir),
                    "-d",
                    str(dist_dir),
                    "bdist_wheel",
                    "-b",
                    str(build_dir),
                    "-d",
                    str(dist_dir),
                ]
            )
    finally:
        # Restore the working directory even if a build fails; the original
        # code left the process chdir'ed into testdata on error, which
        # poisoned every subsequent test in the session.
        os.chdir(pwd)

    return sorted(
        str(fname) for fname in dist_dir.iterdir() if fname.suffix in (".whl", ".egg")
    )
+
+
@pytest.fixture(scope="session")
def wheel_paths(wheels_and_eggs):
    """Only the .whl files from the built test distributions."""
    return [path for path in wheels_and_eggs if path.endswith(".whl")]
+
+
@pytest.fixture(scope="session")
def egg_paths(wheels_and_eggs):
    """Only the .egg files from the built test distributions."""
    return [path for path in wheels_and_eggs if path.endswith(".egg")]
diff --git a/tests/test_bdist_wheel.py b/tests/test_bdist_wheel.py
new file mode 100644 (file)
index 0000000..7c8fedb
--- /dev/null
@@ -0,0 +1,423 @@
+from __future__ import annotations
+
+import os.path
+import shutil
+import stat
+import struct
+import subprocess
+import sys
+import sysconfig
+from inspect import cleandoc
+from unittest.mock import Mock
+from zipfile import ZipFile
+
+import pytest
+import setuptools
+
+from wheel.bdist_wheel import (
+    bdist_wheel,
+    get_abi_tag,
+    remove_readonly,
+    remove_readonly_exc,
+)
+from wheel.vendored.packaging import tags
+from wheel.wheelfile import WheelFile
+
# Files that bdist_wheel always emits into the .dist-info directory.
DEFAULT_FILES = {
    "dummy_dist-1.0.dist-info/top_level.txt",
    "dummy_dist-1.0.dist-info/METADATA",
    "dummy_dist-1.0.dist-info/WHEEL",
    "dummy_dist-1.0.dist-info/RECORD",
}
# Conventional license file names collected by default.
DEFAULT_LICENSE_FILES = {
    "LICENSE",
    "LICENSE.txt",
    "LICENCE",
    "LICENCE.txt",
    "COPYING",
    "COPYING.md",
    "NOTICE",
    "NOTICE.rst",
    "AUTHORS",
    "AUTHORS.txt",
}
# Editor backup files that must never be treated as license files.
OTHER_IGNORED_FILES = {
    "LICENSE~",
    "AUTHORS~",
}
# Minimal setup.py used to build the dummy_dist fixture.
SETUPPY_EXAMPLE = """\
from setuptools import setup

setup(
    name='dummy_dist',
    version='1.0',
)
"""
+
+
@pytest.fixture
def dummy_dist(tmp_path_factory):
    """Create a minimal source tree with many (real and decoy) license files."""
    basedir = tmp_path_factory.mktemp("dummy_dist")
    basedir.joinpath("setup.py").write_text(SETUPPY_EXAMPLE, encoding="utf-8")
    for name in DEFAULT_LICENSE_FILES | OTHER_IGNORED_FILES:
        basedir.joinpath(name).write_text("", encoding="utf-8")

    license_dir = basedir.joinpath("licenses")
    license_dir.mkdir()
    license_dir.joinpath("DUMMYFILE").write_text("", encoding="utf-8")
    return basedir
+
+
+def test_no_scripts(wheel_paths):
+    """Make sure entry point scripts are not generated."""
+    path = next(path for path in wheel_paths if "complex_dist" in path)
+    for entry in ZipFile(path).infolist():
+        assert ".data/scripts/" not in entry.filename
+
+
def test_unicode_record(wheel_paths):
    """RECORD must contain non-ASCII member names encoded as UTF-8."""
    path = next(entry for entry in wheel_paths if "unicode.dist" in entry)
    with ZipFile(path) as zf:
        record_bytes = zf.read("unicode.dist-0.1.dist-info/RECORD")

    assert "åäö_日本語.py".encode() in record_bytes
+
+
# PKG-INFO exercising non-ASCII metadata (names, email display parts and
# body text) to ensure UTF-8 survives the egg-info -> dist-info conversion.
UTF8_PKG_INFO = """\
Metadata-Version: 2.1
Name: helloworld
Version: 42
Author-email: "John X. Ãørçeč" <john@utf8.org>, Γαμα קּ 東 <gama@utf8.org>


UTF-8 描述 説明
"""
+
+
def test_preserve_unicode_metadata(monkeypatch, tmp_path):
    """egg2dist must copy PKG-INFO into METADATA without mangling UTF-8."""
    monkeypatch.chdir(tmp_path)
    egginfo = tmp_path / "dummy_dist.egg-info"
    distinfo = tmp_path / "dummy_dist.dist-info"

    egginfo.mkdir()
    (egginfo / "PKG-INFO").write_text(UTF8_PKG_INFO, encoding="utf-8")
    (egginfo / "dependency_links.txt").touch()

    class simpler_bdist_wheel(bdist_wheel):
        """Avoid messing with setuptools/distutils internals"""

        def __init__(self):
            # Deliberately skip distutils Command.__init__; egg2dist only
            # needs license_paths below.
            pass

        @property
        def license_paths(self):
            return []

    cmd_obj = simpler_bdist_wheel()
    cmd_obj.egg2dist(egginfo, distinfo)

    metadata = (distinfo / "METADATA").read_text(encoding="utf-8")
    assert 'Author-email: "John X. Ãørçeč"' in metadata
    assert "Γαμα קּ 東 " in metadata
    assert "UTF-8 描述 説明" in metadata
+
+
def test_licenses_default(dummy_dist, monkeypatch, tmp_path):
    """All conventional license file names are picked up by default."""
    monkeypatch.chdir(dummy_dist)
    subprocess.check_call(
        [sys.executable, "setup.py", "bdist_wheel", "-b", str(tmp_path), "--universal"]
    )
    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
        expected = DEFAULT_FILES | {
            "dummy_dist-1.0.dist-info/" + fname for fname in DEFAULT_LICENSE_FILES
        }
        assert set(wf.namelist()) == expected
+
+
def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
    """The deprecated singular license_file option still selects one file."""
    dummy_dist.joinpath("setup.cfg").write_text(
        "[metadata]\nlicense_file=licenses/DUMMYFILE", encoding="utf-8"
    )
    monkeypatch.chdir(dummy_dist)
    subprocess.check_call(
        [sys.executable, "setup.py", "bdist_wheel", "-b", str(tmp_path), "--universal"]
    )
    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
        expected = DEFAULT_FILES | {"dummy_dist-1.0.dist-info/DUMMYFILE"}
        assert set(wf.namelist()) == expected
+
+
@pytest.mark.parametrize(
    "config_file, config",
    [
        ("setup.cfg", "[metadata]\nlicense_files=licenses/*\n  LICENSE"),
        ("setup.cfg", "[metadata]\nlicense_files=licenses/*, LICENSE"),
        (
            "setup.py",
            SETUPPY_EXAMPLE.replace(
                ")", "  license_files=['licenses/DUMMYFILE', 'LICENSE'])"
            ),
        ),
    ],
)
def test_licenses_override(dummy_dist, monkeypatch, tmp_path, config_file, config):
    """Explicit license_files settings replace the default glob set."""
    dummy_dist.joinpath(config_file).write_text(config, encoding="utf-8")
    monkeypatch.chdir(dummy_dist)
    subprocess.check_call(
        [sys.executable, "setup.py", "bdist_wheel", "-b", str(tmp_path), "--universal"]
    )
    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
        expected = DEFAULT_FILES | {
            "dummy_dist-1.0.dist-info/" + fname for fname in {"DUMMYFILE", "LICENSE"}
        }
        assert set(wf.namelist()) == expected
+
+
def test_licenses_disabled(dummy_dist, monkeypatch, tmp_path):
    """An empty license_files setting disables license collection entirely."""
    dummy_dist.joinpath("setup.cfg").write_text(
        "[metadata]\nlicense_files=\n", encoding="utf-8"
    )
    monkeypatch.chdir(dummy_dist)
    subprocess.check_call(
        [sys.executable, "setup.py", "bdist_wheel", "-b", str(tmp_path), "--universal"]
    )
    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
        assert set(wf.namelist()) == DEFAULT_FILES
+
+
def test_build_number(dummy_dist, monkeypatch, tmp_path):
    """--build-number must be inserted into the wheel filename."""
    monkeypatch.chdir(dummy_dist)
    command = [
        sys.executable,
        "setup.py",
        "bdist_wheel",
        "-b",
        str(tmp_path),
        "--universal",
        "--build-number=2",
    ]
    subprocess.check_call(command)
    with WheelFile("dist/dummy_dist-1.0-2-py2.py3-none-any.whl") as wf:
        names = set(wf.namelist())
        assert "dummy_dist-1.0.dist-info/RECORD" in names
        assert "dummy_dist-1.0.dist-info/METADATA" in names
+
+
+def test_limited_abi(monkeypatch, tmp_path):
+    """Test that building a binary wheel with the limited ABI works."""
+    this_dir = os.path.dirname(__file__)
+    source_dir = os.path.join(this_dir, "testdata", "extension.dist")
+    build_dir = tmp_path.joinpath("build")
+    dist_dir = tmp_path.joinpath("dist")
+    monkeypatch.chdir(source_dir)
+    # Success criterion is simply that the build completes: check_call raises
+    # CalledProcessError on a non-zero exit, so no further assertions needed.
+    subprocess.check_call(
+        [
+            sys.executable,
+            "setup.py",
+            "bdist_wheel",
+            "-b",
+            str(build_dir),
+            "-d",
+            str(dist_dir),
+        ]
+    )
+
+
+def test_build_from_readonly_tree(dummy_dist, monkeypatch, tmp_path):
+    """Building a wheel from a source tree whose files are read-only must
+    still succeed (regression guard for temp-copy/cleanup handling)."""
+    basedir = str(tmp_path.joinpath("dummy"))
+    shutil.copytree(str(dummy_dist), basedir)
+    monkeypatch.chdir(basedir)
+
+    # Make the tree read-only
+    for root, _dirs, files in os.walk(basedir):
+        for fname in files:
+            os.chmod(os.path.join(root, fname), stat.S_IREAD)
+
+    subprocess.check_call([sys.executable, "setup.py", "bdist_wheel"])
+
+
+@pytest.mark.parametrize(
+    "option, compress_type",
+    list(bdist_wheel.supported_compressions.items()),
+    ids=list(bdist_wheel.supported_compressions),
+)
+def test_compression(dummy_dist, monkeypatch, tmp_path, option, compress_type):
+    """Every value of --compression must be honored for all archive members."""
+    monkeypatch.chdir(dummy_dist)
+    subprocess.check_call(
+        [
+            sys.executable,
+            "setup.py",
+            "bdist_wheel",
+            "-b",
+            str(tmp_path),
+            "--universal",
+            f"--compression={option}",
+        ]
+    )
+    with WheelFile("dist/dummy_dist-1.0-py2.py3-none-any.whl") as wf:
+        filenames = set(wf.namelist())
+        assert "dummy_dist-1.0.dist-info/RECORD" in filenames
+        assert "dummy_dist-1.0.dist-info/METADATA" in filenames
+        # Each member must carry the compression method mapped to the option.
+        for zinfo in wf.filelist:
+            assert zinfo.compress_type == compress_type
+
+
+def test_wheelfile_line_endings(wheel_paths):
+    """The WHEEL metadata file must use Unix line endings (no CR bytes)."""
+    for path in wheel_paths:
+        with WheelFile(path) as wf:
+            wheelfile = next(fn for fn in wf.filelist if fn.filename.endswith("WHEEL"))
+            wheelfile_contents = wf.read(wheelfile)
+            assert b"\r" not in wheelfile_contents
+
+
+def test_unix_epoch_timestamps(dummy_dist, monkeypatch, tmp_path):
+    """SOURCE_DATE_EPOCH=0 (before the 1980 ZIP timestamp floor) must not
+    break the build; only a successful exit is asserted here."""
+    monkeypatch.setenv("SOURCE_DATE_EPOCH", "0")
+    monkeypatch.chdir(dummy_dist)
+    subprocess.check_call(
+        [
+            sys.executable,
+            "setup.py",
+            "bdist_wheel",
+            "-b",
+            str(tmp_path),
+            "--universal",
+            "--build-number=2",
+        ]
+    )
+
+
+def test_get_abi_tag_windows(monkeypatch):
+    """A CPython Windows SOABI like cp313-win_amd64 yields abi tag cp313."""
+    monkeypatch.setattr(tags, "interpreter_name", lambda: "cp")
+    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "cp313-win_amd64")
+    assert get_abi_tag() == "cp313"
+
+
+def test_get_abi_tag_pypy_old(monkeypatch):
+    """An old-style PyPy SOABI (no platform suffix) keeps both components."""
+    monkeypatch.setattr(tags, "interpreter_name", lambda: "pp")
+    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "pypy36-pp73")
+    assert get_abi_tag() == "pypy36_pp73"
+
+
+def test_get_abi_tag_pypy_new(monkeypatch):
+    """A new-style PyPy SOABI with a platform suffix drops that suffix."""
+    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "pypy37-pp73-darwin")
+    monkeypatch.setattr(tags, "interpreter_name", lambda: "pp")
+    assert get_abi_tag() == "pypy37_pp73"
+
+
+def test_get_abi_tag_graalpy(monkeypatch):
+    """GraalPy SOABI keeps the first three dash-separated components."""
+    monkeypatch.setattr(
+        sysconfig, "get_config_var", lambda x: "graalpy231-310-native-x86_64-linux"
+    )
+    monkeypatch.setattr(tags, "interpreter_name", lambda: "graalpy")
+    assert get_abi_tag() == "graalpy231_310_native"
+
+
+def test_get_abi_tag_fallback(monkeypatch):
+    """Unknown interpreters fall back to the SOABI with dashes folded to
+    underscores."""
+    monkeypatch.setattr(sysconfig, "get_config_var", lambda x: "unknown-python-310")
+    monkeypatch.setattr(tags, "interpreter_name", lambda: "unknown-python")
+    assert get_abi_tag() == "unknown_python_310"
+
+
+def test_platform_with_space(dummy_dist, monkeypatch):
+    """Ensure building on platforms with a space in the name succeed."""
+    monkeypatch.chdir(dummy_dist)
+    # check_call raises on a non-zero exit, which is the only assertion here.
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel", "--plat-name", "isilon onefs"]
+    )
+
+
+def test_rmtree_readonly(monkeypatch, tmp_path, capsys):
+    """Verify the rmtree error handler (``onerror`` pre-3.12, ``onexc`` on
+    3.12+) removes read-only files and reports them on stdout."""
+
+    bdist_dir = tmp_path / "with_readonly"
+    bdist_dir.mkdir()
+    some_file = bdist_dir.joinpath("file.txt")
+    some_file.touch()
+    some_file.chmod(stat.S_IREAD)
+
+    # Only Windows is expected to trip the handler for a read-only file;
+    # elsewhere rmtree removes it directly, so the handler fires zero times.
+    expected_count = 1 if sys.platform.startswith("win") else 0
+
+    # shutil.rmtree deprecated onerror in favor of onexc in Python 3.12.
+    if sys.version_info < (3, 12):
+        count_remove_readonly = Mock(side_effect=remove_readonly)
+        shutil.rmtree(bdist_dir, onerror=count_remove_readonly)
+        assert count_remove_readonly.call_count == expected_count
+    else:
+        count_remove_readonly_exc = Mock(side_effect=remove_readonly_exc)
+        shutil.rmtree(bdist_dir, onexc=count_remove_readonly_exc)
+        assert count_remove_readonly_exc.call_count == expected_count
+
+    assert not bdist_dir.is_dir()
+
+    if expected_count:
+        captured = capsys.readouterr()
+        assert "file.txt" in captured.stdout
+
+
+def test_data_dir_with_tag_build(monkeypatch, tmp_path):
+    """
+    Setuptools allow authors to set PEP 440's local version segments
+    using ``egg_info.tag_build``. This should be reflected not only in the
+    ``.whl`` file name, but also in the ``.dist-info`` and ``.data`` dirs.
+    See pypa/setuptools#3997.
+    """
+    monkeypatch.chdir(tmp_path)
+    # Minimal project with a header and a data file so both the .data/headers
+    # and .data/data trees are exercised; tag_build adds the "+what" segment.
+    files = {
+        "setup.py": """
+            from setuptools import setup
+            setup(headers=["hello.h"])
+            """,
+        "setup.cfg": """
+            [metadata]
+            name = test
+            version = 1.0
+
+            [options.data_files]
+            hello/world = file.txt
+
+            [egg_info]
+            tag_build = +what
+            tag_date = 0
+            """,
+        "file.txt": "",
+        "hello.h": "",
+    }
+    for file, content in files.items():
+        with open(file, "w", encoding="utf-8") as fh:
+            fh.write(cleandoc(content))
+
+    subprocess.check_call([sys.executable, "setup.py", "bdist_wheel"])
+
+    # Ensure .whl, .dist-info and .data contain the local segment
+    wheel_path = "dist/test-1.0+what-py3-none-any.whl"
+    assert os.path.exists(wheel_path)
+    entries = set(ZipFile(wheel_path).namelist())
+    for expected in (
+        "test-1.0+what.data/headers/hello.h",
+        "test-1.0+what.data/data/hello/world/file.txt",
+        "test-1.0+what.dist-info/METADATA",
+        "test-1.0+what.dist-info/WHEEL",
+    ):
+        assert expected in entries
+
+    # The untagged spellings must NOT appear alongside the tagged ones.
+    for not_expected in (
+        "test.data/headers/hello.h",
+        "test-1.0.data/data/hello/world/file.txt",
+        "test.dist-info/METADATA",
+        "test-1.0.dist-info/WHEEL",
+    ):
+        assert not_expected not in entries
+
+
+@pytest.mark.parametrize(
+    "reported,expected",
+    [("linux-x86_64", "linux_i686"), ("linux-aarch64", "linux_armv7l")],
+)
+def test_platform_linux32(reported, expected, monkeypatch):
+    """A 32-bit interpreter (pointer size forced to 4 via struct.calcsize)
+    on a 64-bit kernel must yield the 32-bit platform tag."""
+    monkeypatch.setattr(struct, "calcsize", lambda x: 4)
+    dist = setuptools.Distribution()
+    cmd = bdist_wheel(dist)
+    cmd.plat_name = reported
+    cmd.root_is_pure = False
+    _, _, actual = cmd.get_tag()
+    assert actual == expected
diff --git a/tests/test_macosx_libfile.py b/tests/test_macosx_libfile.py
new file mode 100644 (file)
index 0000000..ef8dc96
--- /dev/null
@@ -0,0 +1,226 @@
+from __future__ import annotations
+
+import os
+import struct
+import sysconfig
+
+import pytest
+
+from wheel.bdist_wheel import get_platform
+from wheel.macosx_libfile import extract_macosx_min_system_version
+
+
+def test_read_from_dylib():
+    """extract_macosx_min_system_version must read the minimum macOS version
+    from each fixture dylib (thin, fat, i386, universal2) and return None for
+    non-Mach-O files."""
+    dirname = os.path.dirname(__file__)
+    dylib_dir = os.path.join(dirname, "testdata", "macosx_minimal_system_version")
+    # (fixture file, expected dotted version string)
+    versions = [
+        ("test_lib_10_6_fat.dylib", "10.6.0"),
+        ("test_lib_10_10_fat.dylib", "10.10.0"),
+        ("test_lib_10_14_fat.dylib", "10.14.0"),
+        ("test_lib_10_6.dylib", "10.6.0"),
+        ("test_lib_10_10.dylib", "10.10.0"),
+        ("test_lib_10_14.dylib", "10.14.0"),
+        ("test_lib_10_6_386.dylib", "10.6.0"),
+        ("test_lib_10_10_386.dylib", "10.10.0"),
+        ("test_lib_10_14_386.dylib", "10.14.0"),
+        ("test_lib_multiple_fat.dylib", "10.14.0"),
+        ("test_lib_10_10_10.dylib", "10.10.10"),
+        ("test_lib_11.dylib", "11.0.0"),
+        ("test_lib_10_9_universal2.dylib", "10.9.0"),
+    ]
+    for file_name, ver in versions:
+        extracted = extract_macosx_min_system_version(
+            os.path.join(dylib_dir, file_name)
+        )
+        str_ver = ".".join([str(x) for x in extracted])
+        assert str_ver == ver
+    # Non-dylib inputs (a C source and a non-Mach-O library) yield None.
+    assert (
+        extract_macosx_min_system_version(os.path.join(dylib_dir, "test_lib.c")) is None
+    )
+    assert (
+        extract_macosx_min_system_version(os.path.join(dylib_dir, "libb.dylib")) is None
+    )
+    )
+
+
+def return_factory(return_val):
+    """Return a function that ignores all arguments and returns *return_val*
+    (used to stub sysconfig.get_platform / os.walk below)."""
+    def fun(*args, **kwargs):
+        return return_val
+
+    return fun
+
+
+class TestGetPlatformMacosx:
+    """Tests for get_platform() on macOS: the platform tag must reflect the
+    highest minimum-OS requirement found among bundled dylibs, the
+    MACOSX_DEPLOYMENT_TARGET environment variable, and the interpreter's own
+    build platform, warning on stderr when the tag had to be bumped."""
+
+    def test_simple(self, monkeypatch):
+        """Platform string from sysconfig maps directly to the wheel tag."""
+        dirname = os.path.dirname(__file__)
+        dylib_dir = os.path.join(dirname, "testdata", "macosx_minimal_system_version")
+        monkeypatch.setattr(
+            sysconfig, "get_platform", return_factory("macosx-11.0-x86_64")
+        )
+        assert get_platform(dylib_dir) == "macosx_11_0_x86_64"
+
+    def test_version_bump(self, monkeypatch, capsys):
+        """A dylib needing a newer macOS than the interpreter bumps the tag
+        and warns on stderr."""
+        dirname = os.path.dirname(__file__)
+        dylib_dir = os.path.join(dirname, "testdata", "macosx_minimal_system_version")
+        monkeypatch.setattr(
+            sysconfig, "get_platform", return_factory("macosx-10.9-x86_64")
+        )
+        assert get_platform(dylib_dir) == "macosx_11_0_x86_64"
+        captured = capsys.readouterr()
+        assert "[WARNING] This wheel needs a higher macOS version than" in captured.err
+
+    def test_information_about_problematic_files_python_version(
+        self, monkeypatch, capsys
+    ):
+        """The warning names the offending dylib when the baseline comes from
+        the interpreter's build platform."""
+        dirname = os.path.dirname(__file__)
+        dylib_dir = os.path.join(dirname, "testdata", "macosx_minimal_system_version")
+        monkeypatch.setattr(
+            sysconfig, "get_platform", return_factory("macosx-10.9-x86_64")
+        )
+        # Restrict the scan to two known fixtures via a stubbed os.walk.
+        monkeypatch.setattr(
+            os,
+            "walk",
+            return_factory(
+                [(dylib_dir, [], ["test_lib_10_6.dylib", "test_lib_10_10_fat.dylib"])]
+            ),
+        )
+        assert get_platform(dylib_dir) == "macosx_10_10_x86_64"
+        captured = capsys.readouterr()
+        assert "[WARNING] This wheel needs a higher macOS version than" in captured.err
+        assert (
+            "the version your Python interpreter is compiled against." in captured.err
+        )
+        assert "test_lib_10_10_fat.dylib" in captured.err
+
+    def test_information_about_problematic_files_env_variable(
+        self, monkeypatch, capsys
+    ):
+        """The warning names the offending dylib when the baseline comes from
+        MACOSX_DEPLOYMENT_TARGET."""
+        dirname = os.path.dirname(__file__)
+        dylib_dir = os.path.join(dirname, "testdata", "macosx_minimal_system_version")
+        monkeypatch.setattr(
+            sysconfig, "get_platform", return_factory("macosx-10.9-x86_64")
+        )
+        monkeypatch.setenv("MACOSX_DEPLOYMENT_TARGET", "10.8")
+        monkeypatch.setattr(
+            os,
+            "walk",
+            return_factory(
+                [(dylib_dir, [], ["test_lib_10_6.dylib", "test_lib_10_10_fat.dylib"])]
+            ),
+        )
+        assert get_platform(dylib_dir) == "macosx_10_10_x86_64"
+        captured = capsys.readouterr()
+        assert "[WARNING] This wheel needs a higher macOS version than" in captured.err
+        assert "is set in MACOSX_DEPLOYMENT_TARGET variable." in captured.err
+        assert "test_lib_10_10_fat.dylib" in captured.err
+
+    def test_bump_platform_tag_by_env_variable(self, monkeypatch, capsys):
+        """MACOSX_DEPLOYMENT_TARGET above all dylib requirements raises the
+        tag without any warning."""
+        dirname = os.path.dirname(__file__)
+        dylib_dir = os.path.join(dirname, "testdata", "macosx_minimal_system_version")
+        monkeypatch.setattr(
+            sysconfig, "get_platform", return_factory("macosx-10.9-x86_64")
+        )
+        monkeypatch.setattr(
+            os,
+            "walk",
+            return_factory(
+                [(dylib_dir, [], ["test_lib_10_6.dylib", "test_lib_10_6_fat.dylib"])]
+            ),
+        )
+        assert get_platform(dylib_dir) == "macosx_10_9_x86_64"
+        monkeypatch.setenv("MACOSX_DEPLOYMENT_TARGET", "10.10")
+        assert get_platform(dylib_dir) == "macosx_10_10_x86_64"
+        captured = capsys.readouterr()
+        assert captured.err == ""
+
+    def test_bugfix_release_platform_tag(self, monkeypatch, capsys):
+        """A dylib with a three-part minimum version (10.10.10) still bumps
+        the tag to 10.10 and warns, with or without the env override."""
+        dirname = os.path.dirname(__file__)
+        dylib_dir = os.path.join(dirname, "testdata", "macosx_minimal_system_version")
+        monkeypatch.setattr(
+            sysconfig, "get_platform", return_factory("macosx-10.9-x86_64")
+        )
+        monkeypatch.setattr(
+            os,
+            "walk",
+            return_factory(
+                [
+                    (
+                        dylib_dir,
+                        [],
+                        [
+                            "test_lib_10_6.dylib",
+                            "test_lib_10_6_fat.dylib",
+                            "test_lib_10_10_10.dylib",
+                        ],
+                    )
+                ]
+            ),
+        )
+        assert get_platform(dylib_dir) == "macosx_10_10_x86_64"
+        captured = capsys.readouterr()
+        assert "This wheel needs a higher macOS version than" in captured.err
+        monkeypatch.setenv("MACOSX_DEPLOYMENT_TARGET", "10.9")
+        assert get_platform(dylib_dir) == "macosx_10_10_x86_64"
+        captured = capsys.readouterr()
+        assert "This wheel needs a higher macOS version than" in captured.err
+
+    # NOTE(review): "to_low" in this method name is a typo for "too_low";
+    # renaming is deferred since the name is part of the test's public id.
+    def test_warning_on_to_low_env_variable(self, monkeypatch, capsys):
+        """MACOSX_DEPLOYMENT_TARGET below the interpreter baseline is ignored
+        with an explanatory warning."""
+        dirname = os.path.dirname(__file__)
+        dylib_dir = os.path.join(dirname, "testdata", "macosx_minimal_system_version")
+        monkeypatch.setattr(
+            sysconfig, "get_platform", return_factory("macosx-10.9-x86_64")
+        )
+        monkeypatch.setenv("MACOSX_DEPLOYMENT_TARGET", "10.8")
+        monkeypatch.setattr(
+            os,
+            "walk",
+            return_factory(
+                [(dylib_dir, [], ["test_lib_10_6.dylib", "test_lib_10_6_fat.dylib"])]
+            ),
+        )
+        assert get_platform(dylib_dir) == "macosx_10_9_x86_64"
+        captured = capsys.readouterr()
+        assert (
+            "MACOSX_DEPLOYMENT_TARGET is set to a lower value (10.8) than the"
+            in captured.err
+        )
+
+    def test_get_platform_bigsur_env(self, monkeypatch):
+        """A major-only env target ("11") normalizes to the 11_0 tag."""
+        dirname = os.path.dirname(__file__)
+        dylib_dir = os.path.join(dirname, "testdata", "macosx_minimal_system_version")
+        monkeypatch.setattr(
+            sysconfig, "get_platform", return_factory("macosx-10.9-x86_64")
+        )
+        monkeypatch.setenv("MACOSX_DEPLOYMENT_TARGET", "11")
+        monkeypatch.setattr(
+            os,
+            "walk",
+            return_factory(
+                [(dylib_dir, [], ["test_lib_10_6.dylib", "test_lib_10_10_fat.dylib"])]
+            ),
+        )
+        assert get_platform(dylib_dir) == "macosx_11_0_x86_64"
+
+    def test_get_platform_bigsur_platform(self, monkeypatch):
+        """A major-only sysconfig platform ("macosx-11") also normalizes to
+        the 11_0 tag."""
+        dirname = os.path.dirname(__file__)
+        dylib_dir = os.path.join(dirname, "testdata", "macosx_minimal_system_version")
+        monkeypatch.setattr(
+            sysconfig, "get_platform", return_factory("macosx-11-x86_64")
+        )
+        monkeypatch.setattr(
+            os,
+            "walk",
+            return_factory(
+                [(dylib_dir, [], ["test_lib_10_6.dylib", "test_lib_10_10_fat.dylib"])]
+            ),
+        )
+        assert get_platform(dylib_dir) == "macosx_11_0_x86_64"
+
+
+@pytest.mark.parametrize(
+    "reported,expected",
+    [("linux-x86_64", "linux_i686"), ("linux-aarch64", "linux_armv7l")],
+)
+def test_get_platform_linux32(reported, expected, monkeypatch):
+    """A 32-bit interpreter (pointer size stubbed to 4) on a 64-bit Linux
+    platform string must map to the 32-bit platform tag."""
+    monkeypatch.setattr(sysconfig, "get_platform", return_factory(reported))
+    monkeypatch.setattr(struct, "calcsize", lambda x: 4)
+    assert get_platform(None) == expected
diff --git a/tests/test_metadata.py b/tests/test_metadata.py
new file mode 100644 (file)
index 0000000..8480732
--- /dev/null
@@ -0,0 +1,85 @@
+from __future__ import annotations
+
+from wheel.metadata import pkginfo_to_metadata
+
+
+def test_pkginfo_to_metadata(tmp_path):
+    """pkginfo_to_metadata must merge PKG-INFO with egg-info requires.txt into
+    Metadata 2.1: extras are normalized (``empty+extra`` -> ``empty_extra``,
+    ``reST`` -> ``rest``, duplicate ``Signatures`` folded into ``signatures``)
+    and each requirement gains the matching environment marker."""
+    expected_metadata = [
+        ("Metadata-Version", "2.1"),
+        ("Name", "spam"),
+        ("Version", "0.1"),
+        ("Requires-Dist", "pip @ https://github.com/pypa/pip/archive/1.3.1.zip"),
+        ("Requires-Dist", 'pywin32 ; sys_platform=="win32"'),
+        ("Requires-Dist", 'foo @ http://host/foo.zip ; sys_platform=="win32"'),
+        ("Provides-Extra", "signatures"),
+        (
+            "Requires-Dist",
+            "pyxdg ; (sys_platform!=\"win32\") and extra == 'signatures'",
+        ),
+        ("Provides-Extra", "empty_extra"),
+        ("Provides-Extra", "extra"),
+        ("Requires-Dist", "bar @ http://host/bar.zip ; extra == 'extra'"),
+        ("Provides-Extra", "faster-signatures"),
+        ("Requires-Dist", "ed25519ll ; extra == 'faster-signatures'"),
+        ("Provides-Extra", "rest"),
+        ("Requires-Dist", "docutils >=0.8 ; extra == 'rest'"),
+        ("Requires-Dist", "keyring ; extra == 'signatures'"),
+        ("Requires-Dist", "keyrings.alt ; extra == 'signatures'"),
+        ("Provides-Extra", "test"),
+        ("Requires-Dist", "pytest >=3.0.0 ; extra == 'test'"),
+        ("Requires-Dist", "pytest-cov ; extra == 'test'"),
+    ]
+
+    pkg_info = tmp_path.joinpath("PKG-INFO")
+    pkg_info.write_text(
+        """\
+Metadata-Version: 0.0
+Name: spam
+Version: 0.1
+Provides-Extra: empty+extra
+Provides-Extra: test
+Provides-Extra: reST
+Provides-Extra: signatures
+Provides-Extra: Signatures
+Provides-Extra: faster-signatures""",
+        encoding="utf-8",
+    )
+
+    egg_info_dir = tmp_path.joinpath("test.egg-info")
+    egg_info_dir.mkdir(exist_ok=True)
+    egg_info_dir.joinpath("requires.txt").write_text(
+        """\
+pip@https://github.com/pypa/pip/archive/1.3.1.zip
+
+[extra]
+bar @ http://host/bar.zip
+
+[empty+extra]
+
+[:sys_platform=="win32"]
+pywin32
+foo @http://host/foo.zip
+
+[faster-signatures]
+ed25519ll
+
+[reST]
+docutils>=0.8
+
+[signatures]
+keyring
+keyrings.alt
+
+[Signatures:sys_platform!="win32"]
+pyxdg
+
+[test]
+pytest>=3.0.0
+pytest-cov""",
+        encoding="utf-8",
+    )
+
+    message = pkginfo_to_metadata(
+        egg_info_path=str(egg_info_dir), pkginfo_path=str(pkg_info)
+    )
+    assert message.items() == expected_metadata
diff --git a/tests/test_sdist.py b/tests/test_sdist.py
new file mode 100644 (file)
index 0000000..cc253b7
--- /dev/null
@@ -0,0 +1,50 @@
+import subprocess
+import sys
+import tarfile
+from pathlib import Path
+
+import pytest
+
+# Skip the whole module unless the optional flit and build tools are present.
+pytest.importorskip("flit")
+pytest.importorskip("build")
+
+# This test must be run from the source directory - okay to skip if not
+DIR = Path(__file__).parent.resolve()
+MAIN_DIR = DIR.parent
+
+
+def test_compare_sdists(monkeypatch, tmp_path):
+    monkeypatch.chdir(MAIN_DIR)
+
+    sdist_build_dir = tmp_path / "bdir"
+
+    subprocess.run(
+        [
+            sys.executable,
+            "-m",
+            "build",
+            "--sdist",
+            "--no-isolation",
+            f"--outdir={sdist_build_dir}",
+        ],
+        check=True,
+    )
+
+    (sdist_build,) = sdist_build_dir.glob("*.tar.gz")
+
+    # Flit doesn't allow targeting directories, as far as I can tell
+    process = subprocess.run(
+        [sys.executable, "-m", "flit", "build", "--format=sdist"],
+        stderr=subprocess.PIPE,
+    )
+    if process.returncode != 0:
+        pytest.fail(process.stderr.decode("utf-8"))
+
+    (sdist_flit,) = Path("dist").glob("*.tar.gz")
+
+    out = [set(), set()]
+    for i, sdist in enumerate([sdist_build, sdist_flit]):
+        with tarfile.open(str(sdist), "r:gz") as tar:
+            out[i] = set(tar.getnames())
+
+    assert out[0] == (out[1] - {"setup.py"})
diff --git a/tests/test_tagopt.py b/tests/test_tagopt.py
new file mode 100644 (file)
index 0000000..677f704
--- /dev/null
@@ -0,0 +1,221 @@
+"""
+Tests for the bdist_wheel tag options (--python-tag, --universal, and
+--plat-name)
+"""
+
+from __future__ import annotations
+
+import subprocess
+import sys
+
+import pytest
+
+# Template setup.py for the temp_pkg fixture; {ext_modules} is filled with
+# EXT_MODULES (below) when a C extension is requested, or left empty.
+SETUP_PY = """\
+from setuptools import setup, Extension
+
+setup(
+    name="Test",
+    version="1.0",
+    author_email="author@example.com",
+    py_modules=["test"],
+    {ext_modules}
+)
+"""
+
+EXT_MODULES = "ext_modules=[Extension('_test', sources=['test.c'])],"
+
+
+@pytest.fixture
+def temp_pkg(request, tmp_path):
+    """Create a throwaway package; an optional indirect param [use_ext, hdr]
+    adds a C extension (a non-empty hdr corrupts the include and forces a
+    compile failure, which skips the test)."""
+    tmp_path.joinpath("test.py").write_text('print("Hello, world")', encoding="utf-8")
+
+    ext = getattr(request, "param", [False, ""])
+    if ext[0]:
+        # if ext[1] is not '', it will write a bad header and fail to compile
+        tmp_path.joinpath("test.c").write_text(
+            "#include <std%sio.h>" % ext[1], encoding="utf-8"
+        )
+        setup_py = SETUP_PY.format(ext_modules=EXT_MODULES)
+    else:
+        setup_py = SETUP_PY.format(ext_modules="")
+
+    tmp_path.joinpath("setup.py").write_text(setup_py, encoding="utf-8")
+    if ext[0]:
+        try:
+            subprocess.check_call(
+                [sys.executable, "setup.py", "build_ext"], cwd=str(tmp_path)
+            )
+        except subprocess.CalledProcessError:
+            pytest.skip("Cannot compile C extensions")
+    return tmp_path
+
+
+@pytest.mark.parametrize("temp_pkg", [[True, "xxx"]], indirect=["temp_pkg"])
+def test_nocompile_skips(temp_pkg):
+    """The bad header ("xxx") must make the fixture skip; reaching this body
+    at all means the skip mechanism is broken."""
+    assert False  # noqa: B011 - should have skipped with a "Cannot compile" message
+
+
+def test_default_tag(temp_pkg):
+    """With no tag options, a pure package gets pyN-none-any."""
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel"], cwd=str(temp_pkg)
+    )
+    dist_dir = temp_pkg.joinpath("dist")
+    assert dist_dir.is_dir()
+    wheels = list(dist_dir.iterdir())
+    assert len(wheels) == 1
+    assert wheels[0].name == f"Test-1.0-py{sys.version_info[0]}-none-any.whl"
+    assert wheels[0].suffix == ".whl"
+
+
+def test_build_number(temp_pkg):
+    """--build-number inserts the build tag between version and python tag."""
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel", "--build-number=1"],
+        cwd=str(temp_pkg),
+    )
+    dist_dir = temp_pkg.joinpath("dist")
+    assert dist_dir.is_dir()
+    wheels = list(dist_dir.iterdir())
+    assert len(wheels) == 1
+    assert wheels[0].name == f"Test-1.0-1-py{sys.version_info[0]}-none-any.whl"
+    assert wheels[0].suffix == ".whl"
+
+
+def test_explicit_tag(temp_pkg):
+    """--python-tag overrides the default python tag."""
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel", "--python-tag=py32"],
+        cwd=str(temp_pkg),
+    )
+    dist_dir = temp_pkg.joinpath("dist")
+    assert dist_dir.is_dir()
+    wheels = list(dist_dir.iterdir())
+    assert len(wheels) == 1
+    assert wheels[0].name.startswith("Test-1.0-py32-")
+    assert wheels[0].suffix == ".whl"
+
+
+def test_universal_tag(temp_pkg):
+    """--universal produces the combined py2.py3 tag."""
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel", "--universal"], cwd=str(temp_pkg)
+    )
+    dist_dir = temp_pkg.joinpath("dist")
+    assert dist_dir.is_dir()
+    wheels = list(dist_dir.iterdir())
+    assert len(wheels) == 1
+    assert wheels[0].name.startswith("Test-1.0-py2.py3-")
+    assert wheels[0].suffix == ".whl"
+
+
+def test_universal_beats_explicit_tag(temp_pkg):
+    """--universal takes precedence over a simultaneous --python-tag."""
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel", "--universal", "--python-tag=py32"],
+        cwd=str(temp_pkg),
+    )
+    dist_dir = temp_pkg.joinpath("dist")
+    assert dist_dir.is_dir()
+    wheels = list(dist_dir.iterdir())
+    assert len(wheels) == 1
+    assert wheels[0].name.startswith("Test-1.0-py2.py3-")
+    assert wheels[0].suffix == ".whl"
+
+
+def test_universal_in_setup_cfg(temp_pkg):
+    """universal=1 in [bdist_wheel] of setup.cfg works like --universal."""
+    temp_pkg.joinpath("setup.cfg").write_text(
+        "[bdist_wheel]\nuniversal=1", encoding="utf-8"
+    )
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel"], cwd=str(temp_pkg)
+    )
+    dist_dir = temp_pkg.joinpath("dist")
+    assert dist_dir.is_dir()
+    wheels = list(dist_dir.iterdir())
+    assert len(wheels) == 1
+    assert wheels[0].name.startswith("Test-1.0-py2.py3-")
+    assert wheels[0].suffix == ".whl"
+
+
+def test_pythontag_in_setup_cfg(temp_pkg):
+    """python_tag in [bdist_wheel] of setup.cfg works like --python-tag."""
+    temp_pkg.joinpath("setup.cfg").write_text(
+        "[bdist_wheel]\npython_tag=py32", encoding="utf-8"
+    )
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel"], cwd=str(temp_pkg)
+    )
+    dist_dir = temp_pkg.joinpath("dist")
+    assert dist_dir.is_dir()
+    wheels = list(dist_dir.iterdir())
+    assert len(wheels) == 1
+    assert wheels[0].name.startswith("Test-1.0-py32-")
+    assert wheels[0].suffix == ".whl"
+
+
+def test_legacy_wheel_section_in_setup_cfg(temp_pkg):
+    """The legacy [wheel] section of setup.cfg is still honored."""
+    temp_pkg.joinpath("setup.cfg").write_text("[wheel]\nuniversal=1", encoding="utf-8")
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel"], cwd=str(temp_pkg)
+    )
+    dist_dir = temp_pkg.joinpath("dist")
+    assert dist_dir.is_dir()
+    wheels = list(dist_dir.iterdir())
+    assert len(wheels) == 1
+    assert wheels[0].name.startswith("Test-1.0-py2.py3-")
+    assert wheels[0].suffix == ".whl"
+
+
+def test_plat_name_purepy(temp_pkg):
+    """--plat-name applies to a pure wheel; dots become underscores."""
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel", "--plat-name=testplat.pure"],
+        cwd=str(temp_pkg),
+    )
+    dist_dir = temp_pkg.joinpath("dist")
+    assert dist_dir.is_dir()
+    wheels = list(dist_dir.iterdir())
+    assert len(wheels) == 1
+    assert wheels[0].name.endswith("-testplat_pure.whl")
+    assert wheels[0].suffix == ".whl"
+
+
+@pytest.mark.parametrize("temp_pkg", [[True, ""]], indirect=["temp_pkg"])
+def test_plat_name_ext(temp_pkg):
+    """--plat-name applies to a wheel with a compiled extension too."""
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel", "--plat-name=testplat.arch"],
+        cwd=str(temp_pkg),
+    )
+
+    dist_dir = temp_pkg.joinpath("dist")
+    assert dist_dir.is_dir()
+    wheels = list(dist_dir.iterdir())
+    assert len(wheels) == 1
+    assert wheels[0].name.endswith("-testplat_arch.whl")
+    assert wheels[0].suffix == ".whl"
+
+
+def test_plat_name_purepy_in_setupcfg(temp_pkg):
+    """plat_name in setup.cfg works for a pure wheel."""
+    temp_pkg.joinpath("setup.cfg").write_text(
+        "[bdist_wheel]\nplat_name=testplat.pure", encoding="utf-8"
+    )
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel"], cwd=str(temp_pkg)
+    )
+    dist_dir = temp_pkg.joinpath("dist")
+    assert dist_dir.is_dir()
+    wheels = list(dist_dir.iterdir())
+    assert len(wheels) == 1
+    assert wheels[0].name.endswith("-testplat_pure.whl")
+    assert wheels[0].suffix == ".whl"
+
+
+@pytest.mark.parametrize("temp_pkg", [[True, ""]], indirect=["temp_pkg"])
+def test_plat_name_ext_in_setupcfg(temp_pkg):
+    """plat_name in setup.cfg works for a wheel with a compiled extension."""
+    temp_pkg.joinpath("setup.cfg").write_text(
+        "[bdist_wheel]\nplat_name=testplat.arch", encoding="utf-8"
+    )
+    subprocess.check_call(
+        [sys.executable, "setup.py", "bdist_wheel"], cwd=str(temp_pkg)
+    )
+
+    dist_dir = temp_pkg.joinpath("dist")
+    assert dist_dir.is_dir()
+    wheels = list(dist_dir.iterdir())
+    assert len(wheels) == 1
+    assert wheels[0].name.endswith("-testplat_arch.whl")
+    assert wheels[0].suffix == ".whl"
diff --git a/tests/test_wheelfile.py b/tests/test_wheelfile.py
new file mode 100644 (file)
index 0000000..b587821
--- /dev/null
@@ -0,0 +1,193 @@
+from __future__ import annotations
+
+import stat
+import sys
+from zipfile import ZIP_DEFLATED, ZipFile
+
+import pytest
+
+from wheel.cli import WheelError
+from wheel.wheelfile import WheelFile
+
+
+@pytest.fixture
+def wheel_path(tmp_path):
+    """Path (str) for a not-yet-created wheel with a valid filename."""
+    return str(tmp_path.joinpath("test-1.0-py2.py3-none-any.whl"))
+
+
+@pytest.mark.parametrize(
+    "filename",
+    [
+        "foo-2-py3-none-any.whl",
+        "foo-2-py2.py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+    ],
+)
+def test_wheelfile_re(filename, tmp_path):
+    # Regression test for #208 and #485: the filename regex must parse a
+    # single-digit version and multi-platform tags into the right namever.
+    path = tmp_path.joinpath(filename)
+    with WheelFile(str(path), "w") as wf:
+        assert wf.parsed_filename.group("namever") == "foo-2"
+
+
+@pytest.mark.parametrize(
+    "filename",
+    [
+        "test.whl",
+        "test-1.0.whl",
+        "test-1.0-py2.whl",
+        "test-1.0-py2-none.whl",
+        "test-1.0-py2-none-any",
+        "test-1.0-py 2-none-any.whl",
+    ],
+)
+def test_bad_wheel_filename(filename):
+    """Filenames missing tags, the extension, or containing spaces must be
+    rejected with a WheelError naming the offending filename."""
+    exc = pytest.raises(WheelError, WheelFile, filename)
+    exc.match(f"^Bad wheel filename {filename!r}$")
+
+
+def test_missing_record(wheel_path):
+    """Opening a wheel without a .dist-info/RECORD must raise WheelError."""
+    with ZipFile(wheel_path, "w") as zf:
+        zf.writestr("hello/héllö.py", 'print("Héllö, w0rld!")\n')
+
+    exc = pytest.raises(WheelError, WheelFile, wheel_path)
+    exc.match("^Missing test-1.0.dist-info/RECORD file$")
+
+
+def test_unsupported_hash_algorithm(wheel_path):
+    """A RECORD entry naming an unknown hash algorithm must be rejected."""
+    with ZipFile(wheel_path, "w") as zf:
+        zf.writestr("hello/héllö.py", 'print("Héllö, w0rld!")\n')
+        zf.writestr(
+            "test-1.0.dist-info/RECORD",
+            "hello/héllö.py,sha000=bv-QV3RciQC2v3zL8Uvhd_arp40J5A9xmyubN34OVwo,25",
+        )
+
+    exc = pytest.raises(WheelError, WheelFile, wheel_path)
+    exc.match("^Unsupported hash algorithm: sha000$")
+
+
+@pytest.mark.parametrize(
+    "algorithm, digest",
+    [("md5", "4J-scNa2qvSgy07rS4at-Q"), ("sha1", "QjCnGu5Qucb6-vir1a6BVptvOA4")],
+    ids=["md5", "sha1"],
+)
+def test_weak_hash_algorithm(wheel_path, algorithm, digest):
+    """md5 and sha1 RECORD hashes are known but too weak and must be
+    rejected, citing PEP 427."""
+    hash_string = f"{algorithm}={digest}"
+    with ZipFile(wheel_path, "w") as zf:
+        zf.writestr("hello/héllö.py", 'print("Héllö, w0rld!")\n')
+        zf.writestr("test-1.0.dist-info/RECORD", f"hello/héllö.py,{hash_string},25")
+
+    exc = pytest.raises(WheelError, WheelFile, wheel_path)
+    exc.match(rf"^Weak hash algorithm \({algorithm}\) is not permitted by PEP 427$")
+
+
+@pytest.mark.parametrize(
+    "algorithm, digest",
+    [
+        ("sha256", "bv-QV3RciQC2v3zL8Uvhd_arp40J5A9xmyubN34OVwo"),
+        ("sha384", "cDXriAy_7i02kBeDkN0m2RIDz85w6pwuHkt2PZ4VmT2PQc1TZs8Ebvf6eKDFcD_S"),
+        (
+            "sha512",
+            "kdX9CQlwNt4FfOpOKO_X0pn_v1opQuksE40SrWtMyP1NqooWVWpzCE3myZTfpy8g2azZON_"
+            "iLNpWVxTwuDWqBQ",
+        ),
+    ],
+    ids=["sha256", "sha384", "sha512"],
+)
+def test_testzip(wheel_path, algorithm, digest):
+    """testzip() accepts each strong algorithm when the digest matches."""
+    hash_string = f"{algorithm}={digest}"
+    with ZipFile(wheel_path, "w") as zf:
+        zf.writestr("hello/héllö.py", 'print("Héllö, world!")\n')
+        zf.writestr("test-1.0.dist-info/RECORD", f"hello/héllö.py,{hash_string},25")
+
+    with WheelFile(wheel_path) as wf:
+        wf.testzip()
+
+
+def test_testzip_missing_hash(wheel_path):
+    """testzip() must fail when a member has no RECORD hash entry."""
+    with ZipFile(wheel_path, "w") as zf:
+        zf.writestr("hello/héllö.py", 'print("Héllö, world!")\n')
+        zf.writestr("test-1.0.dist-info/RECORD", "")
+
+    with WheelFile(wheel_path) as wf:
+        exc = pytest.raises(WheelError, wf.testzip)
+        exc.match("^No hash found for file 'hello/héllö.py'$")
+
+
+def test_testzip_bad_hash(wheel_path):
+    """testzip() must fail when the member content does not match its
+    recorded sha256 digest (note the "w0rld" content mismatch)."""
+    with ZipFile(wheel_path, "w") as zf:
+        zf.writestr("hello/héllö.py", 'print("Héllö, w0rld!")\n')
+        zf.writestr(
+            "test-1.0.dist-info/RECORD",
+            "hello/héllö.py,sha256=bv-QV3RciQC2v3zL8Uvhd_arp40J5A9xmyubN34OVwo,25",
+        )
+
+    with WheelFile(wheel_path) as wf:
+        exc = pytest.raises(WheelError, wf.testzip)
+        exc.match("^Hash mismatch for file 'hello/héllö.py'$")
+
+
+def test_write_str(wheel_path):
+    """writestr() must hash members into RECORD, quoting filenames that
+    contain commas (CSV escaping) and appending RECORD itself last."""
+    with WheelFile(wheel_path, "w") as wf:
+        wf.writestr("hello/héllö.py", 'print("Héllö, world!")\n')
+        wf.writestr("hello/h,ll,.py", 'print("Héllö, world!")\n')
+
+    with ZipFile(wheel_path, "r") as zf:
+        infolist = zf.infolist()
+        assert len(infolist) == 3
+        assert infolist[0].filename == "hello/héllö.py"
+        assert infolist[0].file_size == 25
+        assert infolist[1].filename == "hello/h,ll,.py"
+        assert infolist[1].file_size == 25
+        assert infolist[2].filename == "test-1.0.dist-info/RECORD"
+
+        record = zf.read("test-1.0.dist-info/RECORD")
+        # The comma-bearing name is double-quoted; RECORD lists itself with
+        # empty hash and size fields.
+        assert record.decode("utf-8") == (
+            "hello/héllö.py,sha256=bv-QV3RciQC2v3zL8Uvhd_arp40J5A9xmyubN34OVwo,25\n"
+            '"hello/h,ll,.py",sha256=bv-QV3RciQC2v3zL8Uvhd_arp40J5A9xmyubN34OVwo,25\n'
+            "test-1.0.dist-info/RECORD,,\n"
+        )
+
+
+def test_timestamp(tmp_path_factory, wheel_path, monkeypatch):
+    # An environment variable can be used to influence the timestamp on
+    # ZipInfo objects inside the zip.  See issue #143.
+    build_dir = tmp_path_factory.mktemp("build")
+    for filename in ("one", "two", "three"):
+        build_dir.joinpath(filename).write_text(filename + "\n", encoding="utf-8")
+
+    # An epoch on 1980-01-01, the earliest date the ZIP format can represent
+    monkeypatch.setenv("SOURCE_DATE_EPOCH", "315576060")
+
+    with WheelFile(wheel_path, "w") as wf:
+        wf.write_files(str(build_dir))
+
+    with ZipFile(wheel_path, "r") as zf:
+        for info in zf.infolist():
+            assert info.date_time[:3] == (1980, 1, 1)
+            assert info.compress_type == ZIP_DEFLATED
+
+
+@pytest.mark.skipif(
+    sys.platform == "win32", reason="Windows does not support UNIX-like permissions"
+)
+def test_attributes(tmp_path_factory, wheel_path):
+    # With the change from ZipFile.write() to .writestr(), we need to manually
+    # set member attributes.
+    build_dir = tmp_path_factory.mktemp("build")
+    files = (("foo", 0o644), ("bar", 0o755))
+    for filename, mode in files:
+        path = build_dir.joinpath(filename)
+        path.write_text(filename + "\n", encoding="utf-8")
+        path.chmod(mode)
+
+    with WheelFile(wheel_path, "w") as wf:
+        wf.write_files(str(build_dir))
+
+    with ZipFile(wheel_path, "r") as zf:
+        # Source file modes must survive as external_attr (mode | S_IFREG,
+        # shifted into the upper 16 bits per the ZIP format).
+        for filename, mode in files:
+            info = zf.getinfo(filename)
+            assert info.external_attr == (mode | stat.S_IFREG) << 16
+            assert info.compress_type == ZIP_DEFLATED
+
+        # RECORD is generated, not copied, and gets a fixed 0o664 mode.
+        info = zf.getinfo("test-1.0.dist-info/RECORD")
+        assert info.external_attr == (0o664 | stat.S_IFREG) << 16
diff --git a/tests/testdata/abi3extension.dist/extension.c b/tests/testdata/abi3extension.dist/extension.c
new file mode 100644 (file)
index 0000000..a37c3fa
--- /dev/null
@@ -0,0 +1,2 @@
+#define Py_LIMITED_API 0x03020000
+#include <Python.h>
diff --git a/tests/testdata/abi3extension.dist/setup.cfg b/tests/testdata/abi3extension.dist/setup.cfg
new file mode 100644 (file)
index 0000000..9f6ff39
--- /dev/null
@@ -0,0 +1,2 @@
+[bdist_wheel]
+py_limited_api=cp32
diff --git a/tests/testdata/abi3extension.dist/setup.py b/tests/testdata/abi3extension.dist/setup.py
new file mode 100644 (file)
index 0000000..5962bd1
--- /dev/null
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+from setuptools import Extension, setup
+
+setup(
+    name="extension.dist",
+    version="0.1",
+    description="A testing distribution \N{SNOWMAN}",
+    ext_modules=[
+        Extension(name="extension", sources=["extension.c"], py_limited_api=True)
+    ],
+)
diff --git a/tests/testdata/commasinfilenames.dist/mypackage/__init__.py b/tests/testdata/commasinfilenames.dist/mypackage/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tests/testdata/commasinfilenames.dist/mypackage/data/1,2,3.txt b/tests/testdata/commasinfilenames.dist/mypackage/data/1,2,3.txt
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tests/testdata/commasinfilenames.dist/mypackage/data/__init__.py b/tests/testdata/commasinfilenames.dist/mypackage/data/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tests/testdata/commasinfilenames.dist/setup.py b/tests/testdata/commasinfilenames.dist/setup.py
new file mode 100644 (file)
index 0000000..a2783a3
--- /dev/null
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+from setuptools import setup
+
+setup(
+    name="testrepo",
+    version="0.1",
+    packages=["mypackage"],
+    description="A test package with commas in file names",
+    include_package_data=True,
+    package_data={"mypackage.data": ["*"]},
+)
diff --git a/tests/testdata/commasinfilenames.dist/testrepo-0.1.0/mypackage/__init__.py b/tests/testdata/commasinfilenames.dist/testrepo-0.1.0/mypackage/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tests/testdata/complex-dist/complexdist/__init__.py b/tests/testdata/complex-dist/complexdist/__init__.py
new file mode 100644 (file)
index 0000000..88aa7b7
--- /dev/null
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+
+def main():
+    return
diff --git a/tests/testdata/complex-dist/setup.py b/tests/testdata/complex-dist/setup.py
new file mode 100644 (file)
index 0000000..e0439d9
--- /dev/null
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from setuptools import setup
+
+setup(
+    name="complex-dist",
+    version="0.1",
+    description="Another testing distribution \N{SNOWMAN}",
+    long_description="Another testing distribution \N{SNOWMAN}",
+    author="Illustrious Author",
+    author_email="illustrious@example.org",
+    url="http://example.org/exemplary",
+    packages=["complexdist"],
+    setup_requires=["wheel", "setuptools"],
+    install_requires=["quux", "splort"],
+    extras_require={"simple": ["simple.dist"]},
+    tests_require=["foo", "bar>=10.0.0"],
+    entry_points={
+        "console_scripts": [
+            "complex-dist=complexdist:main",
+            "complex-dist2=complexdist:main",
+        ],
+    },
+)
diff --git a/tests/testdata/extension.dist/extension.c b/tests/testdata/extension.dist/extension.c
new file mode 100644 (file)
index 0000000..26403ef
--- /dev/null
@@ -0,0 +1,17 @@
+#include <Python.h>
+
+static PyMethodDef methods[] = {
+       { NULL, NULL, 0, NULL }
+};
+
+static struct PyModuleDef module_def = {
+       PyModuleDef_HEAD_INIT,
+       "extension",
+       "Dummy extension module",
+       -1,
+       methods
+};
+
+PyMODINIT_FUNC PyInit_extension(void) {
+       return PyModule_Create(&module_def);
+}
diff --git a/tests/testdata/extension.dist/setup.py b/tests/testdata/extension.dist/setup.py
new file mode 100644 (file)
index 0000000..9a6eed8
--- /dev/null
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+from setuptools import Extension, setup
+
+setup(
+    name="extension.dist",
+    version="0.1",
+    description="A testing distribution \N{SNOWMAN}",
+    ext_modules=[Extension(name="extension", sources=["extension.c"])],
+)
diff --git a/tests/testdata/headers.dist/header.h b/tests/testdata/headers.dist/header.h
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tests/testdata/headers.dist/headersdist.py b/tests/testdata/headers.dist/headersdist.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tests/testdata/headers.dist/setup.cfg b/tests/testdata/headers.dist/setup.cfg
new file mode 100644 (file)
index 0000000..3c6e79c
--- /dev/null
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal=1
diff --git a/tests/testdata/headers.dist/setup.py b/tests/testdata/headers.dist/setup.py
new file mode 100644 (file)
index 0000000..6cf9b46
--- /dev/null
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+from setuptools import setup
+
+setup(
+    name="headers.dist",
+    version="0.1",
+    description="A distribution with headers",
+    headers=["header.h"],
+)
diff --git a/tests/testdata/macosx_minimal_system_version/libb.dylib b/tests/testdata/macosx_minimal_system_version/libb.dylib
new file mode 100644 (file)
index 0000000..25c9546
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/libb.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib.c b/tests/testdata/macosx_minimal_system_version/test_lib.c
new file mode 100644 (file)
index 0000000..dfa2268
--- /dev/null
@@ -0,0 +1,13 @@
+int num_of_letters(char* text){
+    int num = 0;
+    char * lett = text;
+    while (lett != 0){
+        if (*lett >= 'a' && *lett <= 'z'){
+            num += 1;
+        } else if (*lett >= 'A' && *lett <= 'Z'){
+            num += 1;
+        }
+        lett += 1;
+    }
+    return num;
+}
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_10_10.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_10_10.dylib
new file mode 100644 (file)
index 0000000..eaf1a94
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_10_10.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_10_10_10.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_10_10_10.dylib
new file mode 100644 (file)
index 0000000..229d115
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_10_10_10.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_10_10_386.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_10_10_386.dylib
new file mode 100644 (file)
index 0000000..8f54387
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_10_10_386.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_10_10_fat.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_10_10_fat.dylib
new file mode 100644 (file)
index 0000000..6c09512
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_10_10_fat.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_10_14.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_10_14.dylib
new file mode 100644 (file)
index 0000000..c9024cc
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_10_14.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_10_14_386.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_10_14_386.dylib
new file mode 100644 (file)
index 0000000..c85b716
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_10_14_386.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_10_14_fat.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_10_14_fat.dylib
new file mode 100644 (file)
index 0000000..4bb0940
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_10_14_fat.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_10_6.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_10_6.dylib
new file mode 100644 (file)
index 0000000..80401ee
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_10_6.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_10_6_386.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_10_6_386.dylib
new file mode 100644 (file)
index 0000000..1e48cd8
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_10_6_386.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_10_6_fat.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_10_6_fat.dylib
new file mode 100644 (file)
index 0000000..f4ffaee
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_10_6_fat.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_10_9_universal2.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_10_9_universal2.dylib
new file mode 100755 (executable)
index 0000000..26ab109
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_10_9_universal2.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_11.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_11.dylib
new file mode 100644 (file)
index 0000000..80202c1
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_11.dylib differ
diff --git a/tests/testdata/macosx_minimal_system_version/test_lib_multiple_fat.dylib b/tests/testdata/macosx_minimal_system_version/test_lib_multiple_fat.dylib
new file mode 100644 (file)
index 0000000..5f7fd50
Binary files /dev/null and b/tests/testdata/macosx_minimal_system_version/test_lib_multiple_fat.dylib differ
diff --git a/tests/testdata/simple.dist/setup.py b/tests/testdata/simple.dist/setup.py
new file mode 100644 (file)
index 0000000..1e7a78a
--- /dev/null
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from setuptools import setup
+
+setup(
+    name="simple.dist",
+    version="0.1",
+    description="A testing distribution \N{SNOWMAN}",
+    packages=["simpledist"],
+    extras_require={"voting": ["beaglevote"]},
+)
diff --git a/tests/testdata/simple.dist/simpledist/__init__.py b/tests/testdata/simple.dist/simpledist/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tests/testdata/test-1.0-py2.py3-none-any.whl b/tests/testdata/test-1.0-py2.py3-none-any.whl
new file mode 100644 (file)
index 0000000..cf1436c
Binary files /dev/null and b/tests/testdata/test-1.0-py2.py3-none-any.whl differ
diff --git a/tests/testdata/unicode.dist/setup.py b/tests/testdata/unicode.dist/setup.py
new file mode 100644 (file)
index 0000000..89c4be1
--- /dev/null
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+from setuptools import setup
+
+setup(
+    name="unicode.dist",
+    version="0.1",
+    description="A testing distribution \N{SNOWMAN}",
+    packages=["unicodedist"],
+)
diff --git a/tests/testdata/unicode.dist/unicodedist/__init__.py b/tests/testdata/unicode.dist/unicodedist/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/tests/testdata/unicode.dist/unicodedist/åäö_日本語.py b/tests/testdata/unicode.dist/unicodedist/åäö_日本語.py
new file mode 100644 (file)
index 0000000..e69de29