From f0fd51d17798add078a066a2361e4ded2a770676 Mon Sep 17 00:00:00 2001 From: TizenOpenSource Date: Tue, 6 Feb 2024 12:49:26 +0900 Subject: [PATCH] Imported Upstream version 1.21.0 --- LICENSE.txt | 9 + PKG-INFO | 81 + README.md | 45 + pyproject.toml | 57 + scripts/update_licenses.py | 71 + src/hatchling/__about__.py | 1 + src/hatchling/__init__.py | 0 src/hatchling/__main__.py | 6 + src/hatchling/bridge/__init__.py | 0 src/hatchling/bridge/app.py | 110 ++ src/hatchling/build.py | 142 ++ src/hatchling/builders/__init__.py | 0 src/hatchling/builders/app.py | 199 +++ src/hatchling/builders/config.py | 930 ++++++++++ src/hatchling/builders/constants.py | 32 + src/hatchling/builders/custom.py | 54 + src/hatchling/builders/hooks/__init__.py | 0 src/hatchling/builders/hooks/custom.py | 41 + .../builders/hooks/plugin/__init__.py | 0 src/hatchling/builders/hooks/plugin/hooks.py | 15 + .../builders/hooks/plugin/interface.py | 135 ++ src/hatchling/builders/hooks/version.py | 71 + src/hatchling/builders/plugin/__init__.py | 0 src/hatchling/builders/plugin/hooks.py | 17 + src/hatchling/builders/plugin/interface.py | 432 +++++ src/hatchling/builders/sdist.py | 352 ++++ src/hatchling/builders/utils.py | 117 ++ src/hatchling/builders/wheel.py | 715 ++++++++ src/hatchling/cli/__init__.py | 28 + src/hatchling/cli/build/__init__.py | 108 ++ src/hatchling/cli/dep/__init__.py | 37 + src/hatchling/cli/metadata/__init__.py | 58 + src/hatchling/cli/version/__init__.py | 51 + src/hatchling/dep/__init__.py | 0 src/hatchling/dep/core.py | 132 ++ src/hatchling/licenses/__init__.py | 0 src/hatchling/licenses/parse.py | 93 + src/hatchling/licenses/supported.py | 554 ++++++ src/hatchling/metadata/__init__.py | 0 src/hatchling/metadata/core.py | 1561 +++++++++++++++++ src/hatchling/metadata/custom.py | 41 + src/hatchling/metadata/plugin/__init__.py | 0 src/hatchling/metadata/plugin/hooks.py | 14 + src/hatchling/metadata/plugin/interface.py | 66 + src/hatchling/metadata/spec.py | 314 ++++ 
src/hatchling/metadata/utils.py | 59 + src/hatchling/ouroboros.py | 50 + src/hatchling/plugin/__init__.py | 3 + src/hatchling/plugin/exceptions.py | 2 + src/hatchling/plugin/manager.py | 111 ++ src/hatchling/plugin/specs.py | 23 + src/hatchling/plugin/utils.py | 48 + src/hatchling/py.typed | 0 src/hatchling/utils/__init__.py | 0 src/hatchling/utils/constants.py | 2 + src/hatchling/utils/context.py | 170 ++ src/hatchling/utils/fs.py | 23 + src/hatchling/version/__init__.py | 0 src/hatchling/version/core.py | 58 + src/hatchling/version/scheme/__init__.py | 0 .../version/scheme/plugin/__init__.py | 0 src/hatchling/version/scheme/plugin/hooks.py | 14 + .../version/scheme/plugin/interface.py | 59 + src/hatchling/version/scheme/standard.py | 98 ++ src/hatchling/version/source/__init__.py | 0 src/hatchling/version/source/code.py | 64 + src/hatchling/version/source/env.py | 29 + .../version/source/plugin/__init__.py | 0 src/hatchling/version/source/plugin/hooks.py | 16 + .../version/source/plugin/interface.py | 69 + src/hatchling/version/source/regex.py | 29 + tests/__init__.py | 0 tests/downstream/datadogpy/data.json | 6 + tests/downstream/datadogpy/pyproject.toml | 58 + tests/downstream/hatch-showcase/data.json | 9 + tests/downstream/integrate.py | 265 +++ tests/downstream/requirements.txt | 5 + 77 files changed, 7959 insertions(+) create mode 100644 LICENSE.txt create mode 100644 PKG-INFO create mode 100644 README.md create mode 100644 pyproject.toml create mode 100644 scripts/update_licenses.py create mode 100644 src/hatchling/__about__.py create mode 100644 src/hatchling/__init__.py create mode 100644 src/hatchling/__main__.py create mode 100644 src/hatchling/bridge/__init__.py create mode 100644 src/hatchling/bridge/app.py create mode 100644 src/hatchling/build.py create mode 100644 src/hatchling/builders/__init__.py create mode 100644 src/hatchling/builders/app.py create mode 100644 src/hatchling/builders/config.py create mode 100644 
src/hatchling/builders/constants.py create mode 100644 src/hatchling/builders/custom.py create mode 100644 src/hatchling/builders/hooks/__init__.py create mode 100644 src/hatchling/builders/hooks/custom.py create mode 100644 src/hatchling/builders/hooks/plugin/__init__.py create mode 100644 src/hatchling/builders/hooks/plugin/hooks.py create mode 100644 src/hatchling/builders/hooks/plugin/interface.py create mode 100644 src/hatchling/builders/hooks/version.py create mode 100644 src/hatchling/builders/plugin/__init__.py create mode 100644 src/hatchling/builders/plugin/hooks.py create mode 100644 src/hatchling/builders/plugin/interface.py create mode 100644 src/hatchling/builders/sdist.py create mode 100644 src/hatchling/builders/utils.py create mode 100644 src/hatchling/builders/wheel.py create mode 100644 src/hatchling/cli/__init__.py create mode 100644 src/hatchling/cli/build/__init__.py create mode 100644 src/hatchling/cli/dep/__init__.py create mode 100644 src/hatchling/cli/metadata/__init__.py create mode 100644 src/hatchling/cli/version/__init__.py create mode 100644 src/hatchling/dep/__init__.py create mode 100644 src/hatchling/dep/core.py create mode 100644 src/hatchling/licenses/__init__.py create mode 100644 src/hatchling/licenses/parse.py create mode 100644 src/hatchling/licenses/supported.py create mode 100644 src/hatchling/metadata/__init__.py create mode 100644 src/hatchling/metadata/core.py create mode 100644 src/hatchling/metadata/custom.py create mode 100644 src/hatchling/metadata/plugin/__init__.py create mode 100644 src/hatchling/metadata/plugin/hooks.py create mode 100644 src/hatchling/metadata/plugin/interface.py create mode 100644 src/hatchling/metadata/spec.py create mode 100644 src/hatchling/metadata/utils.py create mode 100644 src/hatchling/ouroboros.py create mode 100644 src/hatchling/plugin/__init__.py create mode 100644 src/hatchling/plugin/exceptions.py create mode 100644 src/hatchling/plugin/manager.py create mode 100644 
src/hatchling/plugin/specs.py create mode 100644 src/hatchling/plugin/utils.py create mode 100644 src/hatchling/py.typed create mode 100644 src/hatchling/utils/__init__.py create mode 100644 src/hatchling/utils/constants.py create mode 100644 src/hatchling/utils/context.py create mode 100644 src/hatchling/utils/fs.py create mode 100644 src/hatchling/version/__init__.py create mode 100644 src/hatchling/version/core.py create mode 100644 src/hatchling/version/scheme/__init__.py create mode 100644 src/hatchling/version/scheme/plugin/__init__.py create mode 100644 src/hatchling/version/scheme/plugin/hooks.py create mode 100644 src/hatchling/version/scheme/plugin/interface.py create mode 100644 src/hatchling/version/scheme/standard.py create mode 100644 src/hatchling/version/source/__init__.py create mode 100644 src/hatchling/version/source/code.py create mode 100644 src/hatchling/version/source/env.py create mode 100644 src/hatchling/version/source/plugin/__init__.py create mode 100644 src/hatchling/version/source/plugin/hooks.py create mode 100644 src/hatchling/version/source/plugin/interface.py create mode 100644 src/hatchling/version/source/regex.py create mode 100644 tests/__init__.py create mode 100644 tests/downstream/datadogpy/data.json create mode 100644 tests/downstream/datadogpy/pyproject.toml create mode 100644 tests/downstream/hatch-showcase/data.json create mode 100644 tests/downstream/integrate.py create mode 100644 tests/downstream/requirements.txt diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..71bd98c --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2021-present Ofek Lev + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, 
and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/PKG-INFO b/PKG-INFO new file mode 100644 index 0000000..05aff78 --- /dev/null +++ b/PKG-INFO @@ -0,0 +1,81 @@ +Metadata-Version: 2.1 +Name: hatchling +Version: 1.21.0 +Summary: Modern, extensible Python build backend +Project-URL: Homepage, https://hatch.pypa.io/latest/ +Project-URL: Sponsor, https://github.com/sponsors/ofek +Project-URL: History, https://hatch.pypa.io/dev/history/hatchling/ +Project-URL: Tracker, https://github.com/pypa/hatch/issues +Project-URL: Source, https://github.com/pypa/hatch/tree/master/backend +Author-email: Ofek Lev +License-Expression: MIT +License-File: LICENSE.txt +Keywords: build,hatch,packaging +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy 
+Classifier: Topic :: Software Development :: Build Tools +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.8 +Requires-Dist: editables>=0.3 +Requires-Dist: packaging>=21.3 +Requires-Dist: pathspec>=0.10.1 +Requires-Dist: pluggy>=1.0.0 +Requires-Dist: tomli>=1.2.2; python_version < '3.11' +Requires-Dist: trove-classifiers +Description-Content-Type: text/markdown + +# Hatchling + +
+ +Hatch logo + +| | | +| --- | --- | +| Package | [![PyPI - Version](https://img.shields.io/pypi/v/hatchling.svg?logo=pypi&label=PyPI&logoColor=gold)](https://pypi.org/project/hatchling/) [![PyPI - Downloads](https://img.shields.io/pypi/dm/hatchling.svg?color=blue&label=Downloads&logo=pypi&logoColor=gold)](https://pypi.org/project/hatchling/) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/hatchling.svg?logo=python&label=Python&logoColor=gold)](https://pypi.org/project/hatchling/) | +| Meta | [![Hatch project](https://img.shields.io/badge/%F0%9F%A5%9A-Hatch-4051b5.svg)](https://github.com/pypa/hatch) [![linting - Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) [![code style - Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![types - Mypy](https://img.shields.io/badge/types-Mypy-blue.svg)](https://github.com/python/mypy) [![License - MIT](https://img.shields.io/badge/license-MIT-9400d3.svg)](https://spdx.org/licenses/) [![GitHub Sponsors](https://img.shields.io/github/sponsors/ofek?logo=GitHub%20Sponsors&style=social)](https://github.com/sponsors/ofek) | + +
+ +----- + +This is the extensible, standards compliant build backend used by [Hatch](https://github.com/pypa/hatch). + +## Usage + +The following snippet must be present in your project's `pyproject.toml` file in order to use Hatchling as your build backend: + +```toml +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" +``` + +Then a build frontend like [pip](https://github.com/pypa/pip), [build](https://github.com/pypa/build), or Hatch itself can build or install your project automatically: + +```console +# install using pip +pip install /path/to/project + +# build +python -m build /path/to/project + +# build with Hatch +hatch build /path/to/project +``` + +## Documentation + +- [Project metadata](https://hatch.pypa.io/latest/config/metadata/) +- [Dependencies](https://hatch.pypa.io/latest/config/dependency/) +- [Packaging](https://hatch.pypa.io/latest/config/build/) diff --git a/README.md b/README.md new file mode 100644 index 0000000..2b45fef --- /dev/null +++ b/README.md @@ -0,0 +1,45 @@ +# Hatchling + +
+ +Hatch logo + +| | | +| --- | --- | +| Package | [![PyPI - Version](https://img.shields.io/pypi/v/hatchling.svg?logo=pypi&label=PyPI&logoColor=gold)](https://pypi.org/project/hatchling/) [![PyPI - Downloads](https://img.shields.io/pypi/dm/hatchling.svg?color=blue&label=Downloads&logo=pypi&logoColor=gold)](https://pypi.org/project/hatchling/) [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/hatchling.svg?logo=python&label=Python&logoColor=gold)](https://pypi.org/project/hatchling/) | +| Meta | [![Hatch project](https://img.shields.io/badge/%F0%9F%A5%9A-Hatch-4051b5.svg)](https://github.com/pypa/hatch) [![linting - Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) [![code style - Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![types - Mypy](https://img.shields.io/badge/types-Mypy-blue.svg)](https://github.com/python/mypy) [![License - MIT](https://img.shields.io/badge/license-MIT-9400d3.svg)](https://spdx.org/licenses/) [![GitHub Sponsors](https://img.shields.io/github/sponsors/ofek?logo=GitHub%20Sponsors&style=social)](https://github.com/sponsors/ofek) | + +
+ +----- + +This is the extensible, standards compliant build backend used by [Hatch](https://github.com/pypa/hatch). + +## Usage + +The following snippet must be present in your project's `pyproject.toml` file in order to use Hatchling as your build backend: + +```toml +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" +``` + +Then a build frontend like [pip](https://github.com/pypa/pip), [build](https://github.com/pypa/build), or Hatch itself can build or install your project automatically: + +```console +# install using pip +pip install /path/to/project + +# build +python -m build /path/to/project + +# build with Hatch +hatch build /path/to/project +``` + +## Documentation + +- [Project metadata](https://hatch.pypa.io/latest/config/metadata/) +- [Dependencies](https://hatch.pypa.io/latest/config/dependency/) +- [Packaging](https://hatch.pypa.io/latest/config/build/) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..934dc94 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,57 @@ +[build-system] +requires = [] +build-backend = 'hatchling.ouroboros' +backend-path = ['src'] + +[project] +name = "hatchling" +dynamic = ["version"] +description = "Modern, extensible Python build backend" +readme = "README.md" +license = "MIT" +requires-python = ">=3.8" +keywords = [ + "build", + "hatch", + "packaging", +] +authors = [ + { name = "Ofek Lev", email = "oss@ofek.dev" }, +] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: 
PyPy", + "Topic :: Software Development :: Build Tools", + "Topic :: Software Development :: Libraries :: Python Modules", +] +dependencies = [ + "editables>=0.3", + "packaging>=21.3", + "pathspec>=0.10.1", + "pluggy>=1.0.0", + "tomli>=1.2.2; python_version < '3.11'", + "trove-classifiers", +] + +[project.urls] +Homepage = "https://hatch.pypa.io/latest/" +Sponsor = "https://github.com/sponsors/ofek" +History = "https://hatch.pypa.io/dev/history/hatchling/" +Tracker = "https://github.com/pypa/hatch/issues" +Source = "https://github.com/pypa/hatch/tree/master/backend" + +[project.scripts] +hatchling = "hatchling.cli:hatchling" + +[tool.hatch.version] +path = "src/hatchling/__about__.py" diff --git a/scripts/update_licenses.py b/scripts/update_licenses.py new file mode 100644 index 0000000..a7820b4 --- /dev/null +++ b/scripts/update_licenses.py @@ -0,0 +1,71 @@ +import json +import pathlib +import time +from contextlib import closing +from io import StringIO + +import httpx + +LATEST_API = 'https://api.github.com/repos/spdx/license-list-data/releases/latest' +LICENSES_URL = 'https://raw.githubusercontent.com/spdx/license-list-data/v{}/json/licenses.json' +EXCEPTIONS_URL = 'https://raw.githubusercontent.com/spdx/license-list-data/v{}/json/exceptions.json' + + +def download_data(url): + for _ in range(600): + try: + response = httpx.get(url) + response.raise_for_status() + except Exception: # noqa: BLE001 + time.sleep(1) + continue + else: + return json.loads(response.content.decode('utf-8')) + + message = 'Download failed' + raise ConnectionError(message) + + +def main(): + latest_version = download_data(LATEST_API)['tag_name'][1:] + + licenses = {} + for license_data in download_data(LICENSES_URL.format(latest_version))['licenses']: + license_id = license_data['licenseId'] + deprecated = license_data['isDeprecatedLicenseId'] + licenses[license_id.lower()] = {'id': license_id, 'deprecated': deprecated} + + exceptions = {} + for exception_data in 
download_data(EXCEPTIONS_URL.format(latest_version))['exceptions']: + exception_id = exception_data['licenseExceptionId'] + deprecated = exception_data['isDeprecatedLicenseId'] + exceptions[exception_id.lower()] = {'id': exception_id, 'deprecated': deprecated} + + project_root = pathlib.Path(__file__).resolve().parent.parent + data_file = project_root / 'src' / 'hatchling' / 'licenses' / 'supported.py' + + with closing(StringIO()) as file_contents: + file_contents.write( + f"""\ +from __future__ import annotations + +VERSION = {latest_version!r}\n\nLICENSES: dict[str, dict[str, str | bool]] = {{ +""" + ) + + for normalized_name, data in sorted(licenses.items()): + file_contents.write(f' {normalized_name!r}: {data!r},\n') + + file_contents.write('}\n\nEXCEPTIONS: dict[str, dict[str, str | bool]] = {\n') + + for normalized_name, data in sorted(exceptions.items()): + file_contents.write(f' {normalized_name!r}: {data!r},\n') + + file_contents.write('}\n') + + with data_file.open('w', encoding='utf-8') as f: + f.write(file_contents.getvalue()) + + +if __name__ == '__main__': + main() diff --git a/src/hatchling/__about__.py b/src/hatchling/__about__.py new file mode 100644 index 0000000..b74f54f --- /dev/null +++ b/src/hatchling/__about__.py @@ -0,0 +1 @@ +__version__ = '1.21.0' diff --git a/src/hatchling/__init__.py b/src/hatchling/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/hatchling/__main__.py b/src/hatchling/__main__.py new file mode 100644 index 0000000..b528284 --- /dev/null +++ b/src/hatchling/__main__.py @@ -0,0 +1,6 @@ +import sys + +if __name__ == '__main__': + from hatchling.cli import hatchling + + sys.exit(hatchling()) diff --git a/src/hatchling/bridge/__init__.py b/src/hatchling/bridge/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/hatchling/bridge/app.py b/src/hatchling/bridge/app.py new file mode 100644 index 0000000..095064d --- /dev/null +++ b/src/hatchling/bridge/app.py @@ -0,0 +1,110 @@ +from 
__future__ import annotations + +import os +import sys +from typing import Any + + +class Application: + """ + The way output is displayed can be [configured](../config/hatch.md#terminal) by users. + + !!! important + Never import this directly; Hatch judiciously decides if a type of plugin requires + the capabilities herein and will grant access via an attribute. + """ + + def __init__(self) -> None: + self.__verbosity = int(os.environ.get('HATCH_VERBOSE', '0')) - int(os.environ.get('HATCH_QUIET', '0')) + + @property + def verbosity(self) -> int: + """ + The verbosity level of the application, with 0 as the default. + """ + return self.__verbosity + + @staticmethod + def display(message: str = '', **kwargs: Any) -> None: # noqa: ARG004 + # Do not document + _display(message) + + def display_info(self, message: str = '', **kwargs: Any) -> None: # noqa: ARG002 + """ + Meant to be used for messages conveying basic information. + """ + if self.__verbosity >= 0: + _display(message) + + def display_waiting(self, message: str = '', **kwargs: Any) -> None: # noqa: ARG002 + """ + Meant to be used for messages shown before potentially time consuming operations. + """ + if self.__verbosity >= 0: + _display(message) + + def display_success(self, message: str = '', **kwargs: Any) -> None: # noqa: ARG002 + """ + Meant to be used for messages indicating some positive outcome. + """ + if self.__verbosity >= 0: + _display(message) + + def display_warning(self, message: str = '', **kwargs: Any) -> None: # noqa: ARG002 + """ + Meant to be used for messages conveying important information. + """ + if self.__verbosity >= -1: + _display(message) + + def display_error(self, message: str = '', **kwargs: Any) -> None: # noqa: ARG002 + """ + Meant to be used for messages indicating some unrecoverable error. 
+ """ + if self.__verbosity >= -2: # noqa: PLR2004 + _display(message) + + def display_debug(self, message: str = '', level: int = 1, **kwargs: Any) -> None: # noqa: ARG002 + """ + Meant to be used for messages that are not useful for most user experiences. + The `level` option must be between 1 and 3 (inclusive). + """ + if not 1 <= level <= 3: # noqa: PLR2004 + error_message = 'Debug output can only have verbosity levels between 1 and 3 (inclusive)' + raise ValueError(error_message) + + if self.__verbosity >= level: + _display(message) + + def display_mini_header(self, message: str = '', **kwargs: Any) -> None: # noqa: ARG002 + if self.__verbosity >= 0: + _display(f'[{message}]') + + def abort(self, message: str = '', code: int = 1, **kwargs: Any) -> None: # noqa: ARG002 + """ + Terminate the program with the given return code. + """ + if message and self.__verbosity >= -2: # noqa: PLR2004 + _display(message) + + sys.exit(code) + + def get_safe_application(self) -> SafeApplication: + return SafeApplication(self) + + +class SafeApplication: + def __init__(self, app: Application) -> None: + self.abort = app.abort + self.verbosity = app.verbosity + self.display = app.display + self.display_info = app.display_info + self.display_error = app.display_error + self.display_success = app.display_success + self.display_waiting = app.display_waiting + self.display_warning = app.display_warning + self.display_debug = app.display_debug + self.display_mini_header = app.display_mini_header + + +_display = print diff --git a/src/hatchling/build.py b/src/hatchling/build.py new file mode 100644 index 0000000..6b3fc78 --- /dev/null +++ b/src/hatchling/build.py @@ -0,0 +1,142 @@ +from __future__ import annotations + +import os +from typing import Any + +__all__ = [ + 'build_editable', + 'build_sdist', + 'build_wheel', + 'get_requires_for_build_editable', + 'get_requires_for_build_sdist', + 'get_requires_for_build_wheel', +] +__all__ += ['__all__'] + + +def 
get_requires_for_build_sdist(config_settings: dict[str, Any] | None = None) -> list[str]: # noqa: ARG001 + """ + https://peps.python.org/pep-0517/#get-requires-for-build-sdist + """ + from hatchling.builders.sdist import SdistBuilder + + builder = SdistBuilder(os.getcwd()) + return builder.config.dependencies + + +def build_sdist(sdist_directory: str, config_settings: dict[str, Any] | None = None) -> str: # noqa: ARG001 + """ + https://peps.python.org/pep-0517/#build-sdist + """ + from hatchling.builders.sdist import SdistBuilder + + builder = SdistBuilder(os.getcwd()) + return os.path.basename(next(builder.build(directory=sdist_directory, versions=['standard']))) + + +def get_requires_for_build_wheel(config_settings: dict[str, Any] | None = None) -> list[str]: # noqa: ARG001 + """ + https://peps.python.org/pep-0517/#get-requires-for-build-wheel + """ + from hatchling.builders.wheel import WheelBuilder + + builder = WheelBuilder(os.getcwd()) + return builder.config.dependencies + + +def build_wheel( + wheel_directory: str, + config_settings: dict[str, Any] | None = None, # noqa: ARG001 + metadata_directory: str | None = None, # noqa: ARG001 +) -> str: + """ + https://peps.python.org/pep-0517/#build-wheel + """ + from hatchling.builders.wheel import WheelBuilder + + builder = WheelBuilder(os.getcwd()) + return os.path.basename(next(builder.build(directory=wheel_directory, versions=['standard']))) + + +def get_requires_for_build_editable(config_settings: dict[str, Any] | None = None) -> list[str]: # noqa: ARG001 + """ + https://peps.python.org/pep-0660/#get-requires-for-build-editable + """ + from hatchling.builders.wheel import WheelBuilder + + builder = WheelBuilder(os.getcwd()) + return builder.config.dependencies + + +def build_editable( + wheel_directory: str, + config_settings: dict[str, Any] | None = None, # noqa: ARG001 + metadata_directory: str | None = None, # noqa: ARG001 +) -> str: + """ + https://peps.python.org/pep-0660/#build-editable + """ + from 
hatchling.builders.wheel import WheelBuilder + + builder = WheelBuilder(os.getcwd()) + return os.path.basename(next(builder.build(directory=wheel_directory, versions=['editable']))) + + +# Any builder that has build-time hooks like Hatchling and setuptools cannot technically keep PEP 517's identical +# metadata promise e.g. C extensions would require different tags in the `WHEEL` file. Therefore, we consider the +# methods as mostly being for non-frontend tools like tox and dependency updaters. So Hatchling only writes the +# `METADATA` file to the metadata directory and continues to ignore that directory itself. +# +# An issue we encounter by supporting this metadata-only access is that for installations with pip the required +# dependencies of the project are read at this stage. This means that build hooks that add to the `dependencies` +# build data or modify the built wheel have no effect on what dependencies are or are not installed. +# +# There are legitimate use cases in which this is required, so we only define these when no pip build is detected. 
+# See: https://github.com/pypa/pip/blob/22.2.2/src/pip/_internal/operations/build/build_tracker.py#L41-L51 +# Example use case: https://github.com/pypa/hatch/issues/532 +if 'PIP_BUILD_TRACKER' not in os.environ: + __all__ += ['prepare_metadata_for_build_editable', 'prepare_metadata_for_build_wheel'] + + def prepare_metadata_for_build_wheel( + metadata_directory: str, + config_settings: dict[str, Any] | None = None, # noqa: ARG001 + ) -> str: + """ + https://peps.python.org/pep-0517/#prepare-metadata-for-build-wheel + """ + from hatchling.builders.wheel import WheelBuilder + + builder = WheelBuilder(os.getcwd()) + + directory = os.path.join(metadata_directory, f'{builder.artifact_project_id}.dist-info') + if not os.path.isdir(directory): + os.mkdir(directory) + + with open(os.path.join(directory, 'METADATA'), 'w', encoding='utf-8') as f: + f.write(builder.config.core_metadata_constructor(builder.metadata)) + + return os.path.basename(directory) + + def prepare_metadata_for_build_editable( + metadata_directory: str, + config_settings: dict[str, Any] | None = None, # noqa: ARG001 + ) -> str: + """ + https://peps.python.org/pep-0660/#prepare-metadata-for-build-editable + """ + from hatchling.builders.wheel import EDITABLES_MINIMUM_VERSION, WheelBuilder + + builder = WheelBuilder(os.getcwd()) + + directory = os.path.join(metadata_directory, f'{builder.artifact_project_id}.dist-info') + if not os.path.isdir(directory): + os.mkdir(directory) + + extra_dependencies = [] + if not builder.config.dev_mode_dirs and builder.config.dev_mode_exact: + extra_dependencies.append(f'editables~={EDITABLES_MINIMUM_VERSION}') + + with open(os.path.join(directory, 'METADATA'), 'w', encoding='utf-8') as f: + f.write(builder.config.core_metadata_constructor(builder.metadata, extra_dependencies=extra_dependencies)) + + return os.path.basename(directory) diff --git a/src/hatchling/builders/__init__.py b/src/hatchling/builders/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/src/hatchling/builders/app.py b/src/hatchling/builders/app.py new file mode 100644 index 0000000..40073d9 --- /dev/null +++ b/src/hatchling/builders/app.py @@ -0,0 +1,199 @@ +from __future__ import annotations + +import os +import sys +from typing import Any, Callable + +from hatchling.builders.config import BuilderConfig +from hatchling.builders.plugin.interface import BuilderInterface + + +class AppBuilderConfig(BuilderConfig): + SUPPORTED_VERSIONS = ('3.11', '3.10', '3.9', '3.8', '3.7') + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + self.__scripts: list[str] | None = None + self.__python_version: str | None = None + self.__pyapp_version: str | None = None + + @property + def scripts(self) -> list[str]: + if self.__scripts is None: + known_scripts = self.builder.metadata.core.scripts + scripts = self.target_config.get('scripts', []) + + if not isinstance(scripts, list): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.scripts` must be an array' + raise TypeError(message) + + for i, script in enumerate(scripts, 1): + if not isinstance(script, str): + message = ( + f'Script #{i} of field `tool.hatch.build.targets.{self.plugin_name}.scripts` must be a string' + ) + raise TypeError(message) + + if script not in known_scripts: + message = f'Unknown script in field `tool.hatch.build.targets.{self.plugin_name}.scripts`: {script}' + raise ValueError(message) + + self.__scripts = sorted(set(scripts)) if scripts else list(known_scripts) + + return self.__scripts + + @property + def python_version(self) -> str: + if self.__python_version is None: + python_version = self.target_config.get('python-version', '') + + if not isinstance(python_version, str): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.python-version` must be a string' + raise TypeError(message) + + if not python_version and 'PYAPP_DISTRIBUTION_SOURCE' not in os.environ: + for supported_version in self.SUPPORTED_VERSIONS: + 
if self.builder.metadata.core.python_constraint.contains(supported_version): + python_version = supported_version + break + else: + message = 'Field `project.requires-python` is incompatible with the known distributions' + raise ValueError(message) + + self.__python_version = python_version + + return self.__python_version + + @property + def pyapp_version(self) -> str: + if self.__pyapp_version is None: + pyapp_version = self.target_config.get('pyapp-version', '') + + if not isinstance(pyapp_version, str): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.pyapp-version` must be a string' + raise TypeError(message) + + self.__pyapp_version = pyapp_version + + return self.__pyapp_version + + +class AppBuilder(BuilderInterface): + """ + Build applications + """ + + PLUGIN_NAME = 'app' + + def get_version_api(self) -> dict[str, Callable]: + return {'bootstrap': self.build_bootstrap} + + def get_default_versions(self) -> list[str]: # noqa: PLR6301 + return ['bootstrap'] + + def clean( # noqa: PLR6301 + self, + directory: str, + versions: list[str], # noqa: ARG002 + ) -> None: + import shutil + + app_dir = os.path.join(directory, 'app') + if os.path.isdir(app_dir): + shutil.rmtree(app_dir) + + def build_bootstrap( + self, + directory: str, + **build_data: Any, # noqa: ARG002 + ) -> str: + import shutil + import tempfile + + cargo_path = os.environ.get('CARGO', '') + if not cargo_path: + if not shutil.which('cargo'): + message = 'Executable `cargo` could not be found on PATH' + raise OSError(message) + + cargo_path = 'cargo' + + app_dir = os.path.join(directory, 'app') + if not os.path.isdir(app_dir): + os.makedirs(app_dir) + + on_windows = sys.platform == 'win32' + base_env = dict(os.environ) + base_env['PYAPP_PROJECT_NAME'] = self.metadata.name + base_env['PYAPP_PROJECT_VERSION'] = self.metadata.version + + if self.config.python_version: + base_env['PYAPP_PYTHON_VERSION'] = self.config.python_version + + # 
https://doc.rust-lang.org/cargo/reference/config.html#buildtarget + build_target = os.environ.get('CARGO_BUILD_TARGET', '') + + # This will determine whether we install from crates.io or build locally and is currently required for + # cross compilation: https://github.com/cross-rs/cross/issues/1215 + repo_path = os.environ.get('PYAPP_REPO', '') + + with tempfile.TemporaryDirectory() as temp_dir: + exe_name = 'pyapp.exe' if on_windows else 'pyapp' + if repo_path: + context_dir = repo_path + target_dir = os.path.join(temp_dir, 'build') + if build_target: + temp_exe_path = os.path.join(target_dir, build_target, 'release', exe_name) + else: + temp_exe_path = os.path.join(target_dir, 'release', exe_name) + install_command = [cargo_path, 'build', '--release', '--target-dir', target_dir] + else: + context_dir = temp_dir + temp_exe_path = os.path.join(temp_dir, 'bin', exe_name) + install_command = [cargo_path, 'install', 'pyapp', '--force', '--root', temp_dir] + if self.config.pyapp_version: + install_command.extend(['--version', self.config.pyapp_version]) + + if self.config.scripts: + for script in self.config.scripts: + env = dict(base_env) + env['PYAPP_EXEC_SPEC'] = self.metadata.core.scripts[script] + + self.cargo_build(install_command, cwd=context_dir, env=env) + + exe_stem = ( + f'{script}-{self.metadata.version}-{build_target}' + if build_target + else f'{script}-{self.metadata.version}' + ) + exe_path = os.path.join(app_dir, f'{exe_stem}.exe' if on_windows else exe_stem) + shutil.move(temp_exe_path, exe_path) + else: + self.cargo_build(install_command, cwd=context_dir, env=base_env) + + exe_stem = ( + f'{self.metadata.name}-{self.metadata.version}-{build_target}' + if build_target + else f'{self.metadata.name}-{self.metadata.version}' + ) + exe_path = os.path.join(app_dir, f'{exe_stem}.exe' if on_windows else exe_stem) + shutil.move(temp_exe_path, exe_path) + + return app_dir + + def cargo_build(self, *args: Any, **kwargs: Any) -> None: + import subprocess + + if 
self.app.verbosity < 0: + kwargs['stdout'] = subprocess.PIPE + kwargs['stderr'] = subprocess.STDOUT + + process = subprocess.run(*args, **kwargs) # noqa: PLW1510 + if process.returncode: + message = f'Compilation failed (code {process.returncode})' + raise OSError(message) + + @classmethod + def get_config_class(cls) -> type[AppBuilderConfig]: + return AppBuilderConfig diff --git a/src/hatchling/builders/config.py b/src/hatchling/builders/config.py new file mode 100644 index 0000000..c308ba8 --- /dev/null +++ b/src/hatchling/builders/config.py @@ -0,0 +1,930 @@ +from __future__ import annotations + +import os +from contextlib import contextmanager +from typing import TYPE_CHECKING, Any, Generator, TypeVar + +import pathspec + +from hatchling.builders.constants import DEFAULT_BUILD_DIRECTORY, EXCLUDED_DIRECTORIES, BuildEnvVars +from hatchling.builders.utils import normalize_inclusion_map, normalize_relative_directory, normalize_relative_path +from hatchling.metadata.utils import normalize_project_name +from hatchling.utils.fs import locate_file + +if TYPE_CHECKING: + from hatchling.builders.plugin.interface import BuilderInterface + + +class BuilderConfig: + def __init__( + self, + builder: BuilderInterface, + root: str, + plugin_name: str, + build_config: dict[str, Any], + target_config: dict[str, Any], + ) -> None: + self.__builder = builder + self.__root = root + self.__plugin_name = plugin_name + self.__build_config = build_config + self.__target_config = target_config + self.__hook_config: dict[str, Any] | None = None + self.__versions: list[str] | None = None + self.__dependencies: list[str] | None = None + self.__sources: dict[str, str] | None = None + self.__packages: list[str] | None = None + self.__only_include: dict[str, str] | None = None + self.__force_include: dict[str, str] | None = None + self.__vcs_exclusion_files: dict[str, list[str]] | None = None + + # Possible pathspec.GitIgnoreSpec + self.__include_spec: pathspec.GitIgnoreSpec | None = None + 
self.__exclude_spec: pathspec.GitIgnoreSpec | None = None + self.__artifact_spec: pathspec.GitIgnoreSpec | None = None + + # These are used to create the pathspecs and will never be `None` after the first match attempt + self.__include_patterns: list[str] | None = None + self.__exclude_patterns: list[str] | None = None + self.__artifact_patterns: list[str] | None = None + + # This is used when the only file selection is based on forced inclusion or build-time artifacts. This + # instructs to `exclude` every encountered path without doing pattern matching that matches everything. + self.__exclude_all: bool = False + + # Modified at build time + self.build_artifact_spec: pathspec.GitIgnoreSpec | None = None + self.build_force_include: dict[str, str] = {} + self.build_reserved_paths: set[str] = set() + + # Common options + self.__directory: str | None = None + self.__skip_excluded_dirs: bool | None = None + self.__ignore_vcs: bool | None = None + self.__only_packages: bool | None = None + self.__reproducible: bool | None = None + self.__dev_mode_dirs: list[str] | None = None + self.__dev_mode_exact: bool | None = None + self.__require_runtime_dependencies: bool | None = None + self.__require_runtime_features: list[str] | None = None + + @property + def builder(self) -> BuilderInterface: + return self.__builder + + @property + def root(self) -> str: + return self.__root + + @property + def plugin_name(self) -> str: + return self.__plugin_name + + @property + def build_config(self) -> dict[str, Any]: + return self.__build_config + + @property + def target_config(self) -> dict[str, Any]: + return self.__target_config + + def include_path(self, relative_path: str, *, explicit: bool = False, is_package: bool = True) -> bool: + return ( + self.path_is_build_artifact(relative_path) + or self.path_is_artifact(relative_path) + or ( + not (self.only_packages and not is_package) + and not self.path_is_excluded(relative_path) + and (explicit or 
self.path_is_included(relative_path)) + ) + ) + + def path_is_included(self, relative_path: str) -> bool: + if self.include_spec is None: + return True + + return self.include_spec.match_file(relative_path) + + def path_is_excluded(self, relative_path: str) -> bool: + if self.__exclude_all: + return True + + if self.exclude_spec is None: + return False + + return self.exclude_spec.match_file(relative_path) + + def path_is_artifact(self, relative_path: str) -> bool: + if self.artifact_spec is None: + return False + + return self.artifact_spec.match_file(relative_path) + + def path_is_build_artifact(self, relative_path: str) -> bool: + if self.build_artifact_spec is None: + return False + + return self.build_artifact_spec.match_file(relative_path) + + def path_is_reserved(self, relative_path: str) -> bool: + return relative_path in self.build_reserved_paths + + def directory_is_excluded(self, name: str, relative_path: str) -> bool: + if name in EXCLUDED_DIRECTORIES: + return True + + relative_directory = os.path.join(relative_path, name) + return ( + self.path_is_reserved(relative_directory) + # The trailing slash is necessary so e.g. 
`bar/` matches `foo/bar` + or (self.skip_excluded_dirs and self.path_is_excluded(f'{relative_directory}/')) + ) + + @property + def include_spec(self) -> pathspec.GitIgnoreSpec | None: + if self.__include_patterns is None: + if 'include' in self.target_config: + include_config = self.target_config + include_location = f'tool.hatch.build.targets.{self.plugin_name}.include' + else: + include_config = self.build_config + include_location = 'tool.hatch.build.include' + + all_include_patterns = [] + + include_patterns = include_config.get('include', self.default_include()) + if not isinstance(include_patterns, list): + message = f'Field `{include_location}` must be an array of strings' + raise TypeError(message) + + for i, include_pattern in enumerate(include_patterns, 1): + if not isinstance(include_pattern, str): + message = f'Pattern #{i} in field `{include_location}` must be a string' + raise TypeError(message) + + if not include_pattern: + message = f'Pattern #{i} in field `{include_location}` cannot be an empty string' + raise ValueError(message) + + all_include_patterns.append(include_pattern) + + # Matching only at the root requires a forward slash, back slashes do not work. As such, + # normalize to forward slashes for consistency. 
+ all_include_patterns.extend(f"/{relative_path.replace(os.sep, '/')}/" for relative_path in self.packages) + + if all_include_patterns: + self.__include_spec = pathspec.GitIgnoreSpec.from_lines(all_include_patterns) + + self.__include_patterns = all_include_patterns + + return self.__include_spec + + @property + def exclude_spec(self) -> pathspec.GitIgnoreSpec | None: + if self.__exclude_patterns is None: + if 'exclude' in self.target_config: + exclude_config = self.target_config + exclude_location = f'tool.hatch.build.targets.{self.plugin_name}.exclude' + else: + exclude_config = self.build_config + exclude_location = 'tool.hatch.build.exclude' + + all_exclude_patterns = self.default_global_exclude() + + exclude_patterns = exclude_config.get('exclude', self.default_exclude()) + if not isinstance(exclude_patterns, list): + message = f'Field `{exclude_location}` must be an array of strings' + raise TypeError(message) + + for i, exclude_pattern in enumerate(exclude_patterns, 1): + if not isinstance(exclude_pattern, str): + message = f'Pattern #{i} in field `{exclude_location}` must be a string' + raise TypeError(message) + + if not exclude_pattern: + message = f'Pattern #{i} in field `{exclude_location}` cannot be an empty string' + raise ValueError(message) + + all_exclude_patterns.append(exclude_pattern) + + if not self.ignore_vcs: + all_exclude_patterns.extend(self.load_vcs_exclusion_patterns()) + + if all_exclude_patterns: + self.__exclude_spec = pathspec.GitIgnoreSpec.from_lines(all_exclude_patterns) + + self.__exclude_patterns = all_exclude_patterns + + return self.__exclude_spec + + @property + def artifact_spec(self) -> pathspec.GitIgnoreSpec | None: + if self.__artifact_patterns is None: + if 'artifacts' in self.target_config: + artifact_config = self.target_config + artifact_location = f'tool.hatch.build.targets.{self.plugin_name}.artifacts' + else: + artifact_config = self.build_config + artifact_location = 'tool.hatch.build.artifacts' + + 
all_artifact_patterns = [] + + artifact_patterns = artifact_config.get('artifacts', []) + if not isinstance(artifact_patterns, list): + message = f'Field `{artifact_location}` must be an array of strings' + raise TypeError(message) + + for i, artifact_pattern in enumerate(artifact_patterns, 1): + if not isinstance(artifact_pattern, str): + message = f'Pattern #{i} in field `{artifact_location}` must be a string' + raise TypeError(message) + + if not artifact_pattern: + message = f'Pattern #{i} in field `{artifact_location}` cannot be an empty string' + raise ValueError(message) + + all_artifact_patterns.append(artifact_pattern) + + if all_artifact_patterns: + self.__artifact_spec = pathspec.GitIgnoreSpec.from_lines(all_artifact_patterns) + + self.__artifact_patterns = all_artifact_patterns + + return self.__artifact_spec + + @property + def hook_config(self) -> dict[str, Any]: + if self.__hook_config is None: + hook_config: dict[str, dict[str, Any]] = {} + + global_hook_config = self.build_config.get('hooks', {}) + if not isinstance(global_hook_config, dict): + message = 'Field `tool.hatch.build.hooks` must be a table' + raise TypeError(message) + + for hook_name, config in global_hook_config.items(): + if not isinstance(config, dict): + message = f'Field `tool.hatch.build.hooks.{hook_name}` must be a table' + raise TypeError(message) + + hook_config.setdefault(hook_name, config) + + target_hook_config = self.target_config.get('hooks', {}) + if not isinstance(target_hook_config, dict): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.hooks` must be a table' + raise TypeError(message) + + for hook_name, config in target_hook_config.items(): + if not isinstance(config, dict): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.hooks.{hook_name}` must be a table' + raise TypeError(message) + + hook_config[hook_name] = config + + if not env_var_enabled(BuildEnvVars.NO_HOOKS): + all_hooks_enabled = env_var_enabled(BuildEnvVars.HOOKS_ENABLE) 
+ final_hook_config = { + hook_name: config + for hook_name, config in hook_config.items() + if ( + all_hooks_enabled + or config.get('enable-by-default', True) + or env_var_enabled(f'{BuildEnvVars.HOOK_ENABLE_PREFIX}{hook_name.upper()}') + ) + } + else: + final_hook_config = {} + + self.__hook_config = final_hook_config + + return self.__hook_config + + @property + def directory(self) -> str: + if self.__directory is None: + if 'directory' in self.target_config: + directory = self.target_config['directory'] + if not isinstance(directory, str): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.directory` must be a string' + raise TypeError(message) + else: + directory = self.build_config.get('directory', DEFAULT_BUILD_DIRECTORY) + if not isinstance(directory, str): + message = 'Field `tool.hatch.build.directory` must be a string' + raise TypeError(message) + + self.__directory = self.normalize_build_directory(directory) + + return self.__directory + + @property + def skip_excluded_dirs(self) -> bool: + if self.__skip_excluded_dirs is None: + if 'skip-excluded-dirs' in self.target_config: + skip_excluded_dirs = self.target_config['skip-excluded-dirs'] + if not isinstance(skip_excluded_dirs, bool): + message = ( + f'Field `tool.hatch.build.targets.{self.plugin_name}.skip-excluded-dirs` must be a boolean' + ) + raise TypeError(message) + else: + skip_excluded_dirs = self.build_config.get('skip-excluded-dirs', False) + if not isinstance(skip_excluded_dirs, bool): + message = 'Field `tool.hatch.build.skip-excluded-dirs` must be a boolean' + raise TypeError(message) + + self.__skip_excluded_dirs = skip_excluded_dirs + + return self.__skip_excluded_dirs + + @property + def ignore_vcs(self) -> bool: + if self.__ignore_vcs is None: + if 'ignore-vcs' in self.target_config: + ignore_vcs = self.target_config['ignore-vcs'] + if not isinstance(ignore_vcs, bool): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.ignore-vcs` must be a boolean' + raise 
TypeError(message) + else: + ignore_vcs = self.build_config.get('ignore-vcs', False) + if not isinstance(ignore_vcs, bool): + message = 'Field `tool.hatch.build.ignore-vcs` must be a boolean' + raise TypeError(message) + + self.__ignore_vcs = ignore_vcs + + return self.__ignore_vcs + + @property + def require_runtime_dependencies(self) -> bool: + if self.__require_runtime_dependencies is None: + if 'require-runtime-dependencies' in self.target_config: + require_runtime_dependencies = self.target_config['require-runtime-dependencies'] + if not isinstance(require_runtime_dependencies, bool): + message = ( + f'Field `tool.hatch.build.targets.{self.plugin_name}.require-runtime-dependencies` ' + f'must be a boolean' + ) + raise TypeError(message) + else: + require_runtime_dependencies = self.build_config.get('require-runtime-dependencies', False) + if not isinstance(require_runtime_dependencies, bool): + message = 'Field `tool.hatch.build.require-runtime-dependencies` must be a boolean' + raise TypeError(message) + + self.__require_runtime_dependencies = require_runtime_dependencies + + return self.__require_runtime_dependencies + + @property + def require_runtime_features(self) -> list[str]: + if self.__require_runtime_features is None: + if 'require-runtime-features' in self.target_config: + features_config = self.target_config + features_location = f'tool.hatch.build.targets.{self.plugin_name}.require-runtime-features' + else: + features_config = self.build_config + features_location = 'tool.hatch.build.require-runtime-features' + + require_runtime_features = features_config.get('require-runtime-features', []) + if not isinstance(require_runtime_features, list): + message = f'Field `{features_location}` must be an array' + raise TypeError(message) + + all_features: dict[str, None] = {} + for i, raw_feature in enumerate(require_runtime_features, 1): + if not isinstance(raw_feature, str): + message = f'Feature #{i} of field `{features_location}` must be a string' + 
raise TypeError(message) + + if not raw_feature: + message = f'Feature #{i} of field `{features_location}` cannot be an empty string' + raise ValueError(message) + + feature = normalize_project_name(raw_feature) + if feature not in self.builder.metadata.core.optional_dependencies: + message = ( + f'Feature `{feature}` of field `{features_location}` is not defined in ' + f'field `project.optional-dependencies`' + ) + raise ValueError(message) + + all_features[feature] = None + + self.__require_runtime_features = list(all_features) + + return self.__require_runtime_features + + @property + def only_packages(self) -> bool: + """ + Whether or not the target should ignore non-artifact files that do not reside within a Python package. + """ + if self.__only_packages is None: + if 'only-packages' in self.target_config: + only_packages = self.target_config['only-packages'] + if not isinstance(only_packages, bool): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.only-packages` must be a boolean' + raise TypeError(message) + else: + only_packages = self.build_config.get('only-packages', False) + if not isinstance(only_packages, bool): + message = 'Field `tool.hatch.build.only-packages` must be a boolean' + raise TypeError(message) + + self.__only_packages = only_packages + + return self.__only_packages + + @property + def reproducible(self) -> bool: + """ + Whether or not the target should be built in a reproducible manner, defaulting to true. 
+ """ + if self.__reproducible is None: + if 'reproducible' in self.target_config: + reproducible = self.target_config['reproducible'] + if not isinstance(reproducible, bool): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.reproducible` must be a boolean' + raise TypeError(message) + else: + reproducible = self.build_config.get('reproducible', True) + if not isinstance(reproducible, bool): + message = 'Field `tool.hatch.build.reproducible` must be a boolean' + raise TypeError(message) + + self.__reproducible = reproducible + + return self.__reproducible + + @property + def dev_mode_dirs(self) -> list[str]: + """ + Directories which must be added to Python's search path in + [dev mode](../config/environment/overview.md#dev-mode). + """ + if self.__dev_mode_dirs is None: + if 'dev-mode-dirs' in self.target_config: + dev_mode_dirs_config = self.target_config + dev_mode_dirs_location = f'tool.hatch.build.targets.{self.plugin_name}.dev-mode-dirs' + else: + dev_mode_dirs_config = self.build_config + dev_mode_dirs_location = 'tool.hatch.build.dev-mode-dirs' + + all_dev_mode_dirs = [] + + dev_mode_dirs = dev_mode_dirs_config.get('dev-mode-dirs', []) + if not isinstance(dev_mode_dirs, list): + message = f'Field `{dev_mode_dirs_location}` must be an array of strings' + raise TypeError(message) + + for i, dev_mode_dir in enumerate(dev_mode_dirs, 1): + if not isinstance(dev_mode_dir, str): + message = f'Directory #{i} in field `{dev_mode_dirs_location}` must be a string' + raise TypeError(message) + + if not dev_mode_dir: + message = f'Directory #{i} in field `{dev_mode_dirs_location}` cannot be an empty string' + raise ValueError(message) + + all_dev_mode_dirs.append(dev_mode_dir) + + self.__dev_mode_dirs = all_dev_mode_dirs + + return self.__dev_mode_dirs + + @property + def dev_mode_exact(self) -> bool: + if self.__dev_mode_exact is None: + if 'dev-mode-exact' in self.target_config: + dev_mode_exact = self.target_config['dev-mode-exact'] + if not 
isinstance(dev_mode_exact, bool): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.dev-mode-exact` must be a boolean' + raise TypeError(message) + else: + dev_mode_exact = self.build_config.get('dev-mode-exact', False) + if not isinstance(dev_mode_exact, bool): + message = 'Field `tool.hatch.build.dev-mode-exact` must be a boolean' + raise TypeError(message) + + self.__dev_mode_exact = dev_mode_exact + + return self.__dev_mode_exact + + @property + def versions(self) -> list[str]: + if self.__versions is None: + # Used as an ordered set + all_versions: dict[str, None] = {} + + versions = self.target_config.get('versions', []) + if not isinstance(versions, list): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.versions` must be an array of strings' + raise TypeError(message) + + for i, version in enumerate(versions, 1): + if not isinstance(version, str): + message = ( + f'Version #{i} in field `tool.hatch.build.targets.{self.plugin_name}.versions` must be a string' + ) + raise TypeError(message) + + if not version: + message = ( + f'Version #{i} in field `tool.hatch.build.targets.{self.plugin_name}.versions` ' + f'cannot be an empty string' + ) + raise ValueError(message) + + all_versions[version] = None + + if not all_versions: + default_versions = self.__builder.get_default_versions() + for version in default_versions: + all_versions[version] = None + else: + unknown_versions = set(all_versions) - set(self.__builder.get_version_api()) + if unknown_versions: + message = ( + f'Unknown versions in field `tool.hatch.build.targets.{self.plugin_name}.versions`: ' + f'{", ".join(map(str, sorted(unknown_versions)))}' + ) + raise ValueError(message) + + self.__versions = list(all_versions) + + return self.__versions + + @property + def dependencies(self) -> list[str]: + if self.__dependencies is None: + # Used as an ordered set + dependencies: dict[str, None] = {} + + target_dependencies = self.target_config.get('dependencies', []) + if not 
isinstance(target_dependencies, list): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.dependencies` must be an array' + raise TypeError(message) + + for i, dependency in enumerate(target_dependencies, 1): + if not isinstance(dependency, str): + message = ( + f'Dependency #{i} of field `tool.hatch.build.targets.{self.plugin_name}.dependencies` ' + f'must be a string' + ) + raise TypeError(message) + + dependencies[dependency] = None + + global_dependencies = self.build_config.get('dependencies', []) + if not isinstance(global_dependencies, list): + message = 'Field `tool.hatch.build.dependencies` must be an array' + raise TypeError(message) + + for i, dependency in enumerate(global_dependencies, 1): + if not isinstance(dependency, str): + message = f'Dependency #{i} of field `tool.hatch.build.dependencies` must be a string' + raise TypeError(message) + + dependencies[dependency] = None + + require_runtime_dependencies = self.require_runtime_dependencies + require_runtime_features = {feature: None for feature in self.require_runtime_features} + for hook_name, config in self.hook_config.items(): + hook_require_runtime_dependencies = config.get('require-runtime-dependencies', False) + if not isinstance(hook_require_runtime_dependencies, bool): + message = f'Option `require-runtime-dependencies` of build hook `{hook_name}` must be a boolean' + raise TypeError(message) + + if hook_require_runtime_dependencies: + require_runtime_dependencies = True + + hook_require_runtime_features = config.get('require-runtime-features', []) + if not isinstance(hook_require_runtime_features, list): + message = f'Option `require-runtime-features` of build hook `{hook_name}` must be an array' + raise TypeError(message) + + for i, raw_feature in enumerate(hook_require_runtime_features, 1): + if not isinstance(raw_feature, str): + message = ( + f'Feature #{i} of option `require-runtime-features` of build hook `{hook_name}` ' + f'must be a string' + ) + raise 
TypeError(message) + + if not raw_feature: + message = ( + f'Feature #{i} of option `require-runtime-features` of build hook `{hook_name}` ' + f'cannot be an empty string' + ) + raise ValueError(message) + + feature = normalize_project_name(raw_feature) + if feature not in self.builder.metadata.core.optional_dependencies: + message = ( + f'Feature `{feature}` of option `require-runtime-features` of build hook `{hook_name}` ' + f'is not defined in field `project.optional-dependencies`' + ) + raise ValueError(message) + + require_runtime_features[feature] = None + + hook_dependencies = config.get('dependencies', []) + if not isinstance(hook_dependencies, list): + message = f'Option `dependencies` of build hook `{hook_name}` must be an array' + raise TypeError(message) + + for i, dependency in enumerate(hook_dependencies, 1): + if not isinstance(dependency, str): + message = ( + f'Dependency #{i} of option `dependencies` of build hook `{hook_name}` must be a string' + ) + raise TypeError(message) + + dependencies[dependency] = None + + if require_runtime_dependencies: + for dependency in self.builder.metadata.core.dependencies: + dependencies[dependency] = None + + if require_runtime_features: + for feature in require_runtime_features: + for dependency in self.builder.metadata.core.optional_dependencies[feature]: + dependencies[dependency] = None + + self.__dependencies = list(dependencies) + + return self.__dependencies + + @property + def sources(self) -> dict[str, str]: + if self.__sources is None: + if 'sources' in self.target_config: + sources_config = self.target_config + sources_location = f'tool.hatch.build.targets.{self.plugin_name}.sources' + else: + sources_config = self.build_config + sources_location = 'tool.hatch.build.sources' + + sources = {} + + raw_sources = sources_config.get('sources', []) + if isinstance(raw_sources, list): + for i, source in enumerate(raw_sources, 1): + if not isinstance(source, str): + message = f'Source #{i} in field 
`{sources_location}` must be a string' + raise TypeError(message) + + if not source: + message = f'Source #{i} in field `{sources_location}` cannot be an empty string' + raise ValueError(message) + + sources[normalize_relative_directory(source)] = '' + elif isinstance(raw_sources, dict): + for source, path in raw_sources.items(): + if not isinstance(path, str): + message = f'Path for source `{source}` in field `{sources_location}` must be a string' + raise TypeError(message) + + normalized_path = normalize_relative_path(path) + if normalized_path == '.': + normalized_path = '' + else: + normalized_path += os.sep + + sources[normalize_relative_directory(source) if source else source] = normalized_path + else: + message = f'Field `{sources_location}` must be a mapping or array of strings' + raise TypeError(message) + + for relative_path in self.packages: + source, _package = os.path.split(relative_path) + if source and normalize_relative_directory(relative_path) not in sources: + sources[normalize_relative_directory(source)] = '' + + self.__sources = dict(sorted(sources.items())) + + return self.__sources + + @property + def packages(self) -> list[str]: + if self.__packages is None: + if 'packages' in self.target_config: + package_config = self.target_config + package_location = f'tool.hatch.build.targets.{self.plugin_name}.packages' + else: + package_config = self.build_config + package_location = 'tool.hatch.build.packages' + + packages = package_config.get('packages', self.default_packages()) + if not isinstance(packages, list): + message = f'Field `{package_location}` must be an array of strings' + raise TypeError(message) + + for i, package in enumerate(packages, 1): + if not isinstance(package, str): + message = f'Package #{i} in field `{package_location}` must be a string' + raise TypeError(message) + + if not package: + message = f'Package #{i} in field `{package_location}` cannot be an empty string' + raise ValueError(message) + + self.__packages = 
sorted(normalize_relative_path(package) for package in packages) + + return self.__packages + + @property + def force_include(self) -> dict[str, str]: + if self.__force_include is None: + if 'force-include' in self.target_config: + force_include_config = self.target_config + force_include_location = f'tool.hatch.build.targets.{self.plugin_name}.force-include' + else: + force_include_config = self.build_config + force_include_location = 'tool.hatch.build.force-include' + + force_include = force_include_config.get('force-include', {}) + if not isinstance(force_include, dict): + message = f'Field `{force_include_location}` must be a mapping' + raise TypeError(message) + + for i, (source, relative_path) in enumerate(force_include.items(), 1): + if not source: + message = f'Source #{i} in field `{force_include_location}` cannot be an empty string' + raise ValueError(message) + + if not isinstance(relative_path, str): + message = f'Path for source `{source}` in field `{force_include_location}` must be a string' + raise TypeError(message) + + if not relative_path: + message = ( + f'Path for source `{source}` in field `{force_include_location}` cannot be an empty string' + ) + raise ValueError(message) + + self.__force_include = normalize_inclusion_map(force_include, self.root) + + return self.__force_include + + @property + def only_include(self) -> dict[str, str]: + if self.__only_include is None: + if 'only-include' in self.target_config: + only_include_config = self.target_config + only_include_location = f'tool.hatch.build.targets.{self.plugin_name}.only-include' + else: + only_include_config = self.build_config + only_include_location = 'tool.hatch.build.only-include' + + only_include = only_include_config.get('only-include', self.default_only_include()) or self.packages + if not isinstance(only_include, list): + message = f'Field `{only_include_location}` must be an array' + raise TypeError(message) + + inclusion_map = {} + + for i, relative_path in 
enumerate(only_include, 1): + if not isinstance(relative_path, str): + message = f'Path #{i} in field `{only_include_location}` must be a string' + raise TypeError(message) + + normalized_path = normalize_relative_path(relative_path) + if not normalized_path or normalized_path.startswith(('~', '..')): + message = f'Path #{i} in field `{only_include_location}` must be relative: {relative_path}' + raise ValueError(message) + + if normalized_path in inclusion_map: + message = f'Duplicate path in field `{only_include_location}`: {normalized_path}' + raise ValueError(message) + + inclusion_map[normalized_path] = normalized_path + + self.__only_include = normalize_inclusion_map(inclusion_map, self.root) + + return self.__only_include + + def get_distribution_path(self, relative_path: str) -> str: + # src/foo/bar.py -> foo/bar.py + for source, replacement in self.sources.items(): + if not source: + return replacement + relative_path + + if relative_path.startswith(source): + return relative_path.replace(source, replacement, 1) + + return relative_path + + @property + def vcs_exclusion_files(self) -> dict[str, list[str]]: + if self.__vcs_exclusion_files is None: + exclusion_files: dict[str, list[str]] = {'git': [], 'hg': []} + + local_gitignore = locate_file(self.root, '.gitignore') + if local_gitignore is not None: + exclusion_files['git'].append(local_gitignore) + + local_hgignore = locate_file(self.root, '.hgignore') + if local_hgignore is not None: + exclusion_files['hg'].append(local_hgignore) + + self.__vcs_exclusion_files = exclusion_files + + return self.__vcs_exclusion_files + + def load_vcs_exclusion_patterns(self) -> list[str]: + patterns = [] + + # https://git-scm.com/docs/gitignore#_pattern_format + for exclusion_file in self.vcs_exclusion_files['git']: + with open(exclusion_file, encoding='utf-8') as f: + patterns.extend(f.readlines()) + + # https://linux.die.net/man/5/hgignore + for exclusion_file in self.vcs_exclusion_files['hg']: + with 
open(exclusion_file, encoding='utf-8') as f: + glob_mode = False + for line in f: + exact_line = line.strip() + if exact_line == 'syntax: glob': + glob_mode = True + continue + + if exact_line.startswith('syntax: '): + glob_mode = False + continue + + if glob_mode: + patterns.append(line) + + return patterns + + def normalize_build_directory(self, build_directory: str) -> str: + if not os.path.isabs(build_directory): + build_directory = os.path.join(self.root, build_directory) + + return os.path.normpath(build_directory) + + def default_include(self) -> list: # noqa: PLR6301 + return [] + + def default_exclude(self) -> list: # noqa: PLR6301 + return [] + + def default_packages(self) -> list: # noqa: PLR6301 + return [] + + def default_only_include(self) -> list: # noqa: PLR6301 + return [] + + def default_global_exclude(self) -> list[str]: # noqa: PLR6301 + patterns = ['*.py[cdo]', f'/{DEFAULT_BUILD_DIRECTORY}'] + patterns.sort() + return patterns + + def set_exclude_all(self) -> None: + self.__exclude_all = True + + def get_force_include(self) -> dict[str, str]: + force_include = self.force_include.copy() + force_include.update(self.build_force_include) + return force_include + + @contextmanager + def set_build_data(self, build_data: dict[str, Any]) -> Generator: + try: + # Include anything the hooks indicate + build_artifacts = build_data['artifacts'] + if build_artifacts: + self.build_artifact_spec = pathspec.GitIgnoreSpec.from_lines(build_artifacts) + + self.build_force_include.update(normalize_inclusion_map(build_data['force_include'], self.root)) + + for inclusion_map in (self.force_include, self.build_force_include): + for source, target in inclusion_map.items(): + # Ignore source + # old/ -> new/ + # old.ext -> new.ext + if source.startswith(f'{self.root}{os.sep}'): + self.build_reserved_paths.add(self.get_distribution_path(os.path.relpath(source, self.root))) + # Ignore target files only + # ../out.ext -> ../in.ext + elif os.path.isfile(source): + 
self.build_reserved_paths.add(self.get_distribution_path(target)) + + yield + finally: + self.build_artifact_spec = None + self.build_force_include.clear() + self.build_reserved_paths.clear() + + +def env_var_enabled(env_var: str, *, default: bool = False) -> bool: + if env_var in os.environ: + return os.environ[env_var] in {'1', 'true'} + + return default + + +BuilderConfigBound = TypeVar('BuilderConfigBound', bound=BuilderConfig) diff --git a/src/hatchling/builders/constants.py b/src/hatchling/builders/constants.py new file mode 100644 index 0000000..a695b91 --- /dev/null +++ b/src/hatchling/builders/constants.py @@ -0,0 +1,32 @@ +DEFAULT_BUILD_DIRECTORY = 'dist' + +EXCLUDED_DIRECTORIES = frozenset(( + # Python bytecode + '__pycache__', + # Git + '.git', + # Mercurial + '.hg', + # Hatch + '.hatch', + # tox + '.tox', + # nox + '.nox', + # Ruff + '.ruff_cache', + # pytest + '.pytest_cache', + # Mypy + '.mypy_cache', +)) + + +class BuildEnvVars: + LOCATION = 'HATCH_BUILD_LOCATION' + HOOKS_ONLY = 'HATCH_BUILD_HOOKS_ONLY' + NO_HOOKS = 'HATCH_BUILD_NO_HOOKS' + HOOKS_ENABLE = 'HATCH_BUILD_HOOKS_ENABLE' + HOOK_ENABLE_PREFIX = 'HATCH_BUILD_HOOK_ENABLE_' + CLEAN = 'HATCH_BUILD_CLEAN' + CLEAN_HOOKS_AFTER = 'HATCH_BUILD_CLEAN_HOOKS_AFTER' diff --git a/src/hatchling/builders/custom.py b/src/hatchling/builders/custom.py new file mode 100644 index 0000000..29c8031 --- /dev/null +++ b/src/hatchling/builders/custom.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +import os +from typing import TYPE_CHECKING, Any, Generic + +from hatchling.builders.plugin.interface import BuilderInterface +from hatchling.metadata.core import ProjectMetadata +from hatchling.plugin.manager import PluginManagerBound +from hatchling.plugin.utils import load_plugin_from_script +from hatchling.utils.constants import DEFAULT_BUILD_SCRIPT + +if TYPE_CHECKING: + from hatchling.bridge.app import Application + + +class CustomBuilder(Generic[PluginManagerBound]): + PLUGIN_NAME = 'custom' + + def 
__new__( # type: ignore + cls, + root: str, + plugin_manager: PluginManagerBound | None = None, + config: dict[str, Any] | None = None, + metadata: ProjectMetadata | None = None, + app: Application | None = None, + ) -> BuilderInterface: + project_metadata = ProjectMetadata(root, plugin_manager, config) + + target_config = project_metadata.hatch.build_targets.get(cls.PLUGIN_NAME, {}) + if not isinstance(target_config, dict): + message = f'Field `tool.hatch.build.targets.{cls.PLUGIN_NAME}` must be a table' + raise TypeError(message) + + build_script = target_config.get('path', DEFAULT_BUILD_SCRIPT) + if not isinstance(build_script, str): + message = f'Option `path` for builder `{cls.PLUGIN_NAME}` must be a string' + raise TypeError(message) + + if not build_script: + message = f'Option `path` for builder `{cls.PLUGIN_NAME}` must not be empty if defined' + raise ValueError(message) + + path = os.path.normpath(os.path.join(root, build_script)) + if not os.path.isfile(path): + message = f'Build script does not exist: {build_script}' + raise OSError(message) + + hook_class = load_plugin_from_script(path, build_script, BuilderInterface, 'builder') # type: ignore + hook = hook_class(root, plugin_manager=plugin_manager, config=config, metadata=metadata, app=app) + + # Always keep the name to avoid confusion + hook.PLUGIN_NAME = cls.PLUGIN_NAME + + return hook diff --git a/src/hatchling/builders/hooks/__init__.py b/src/hatchling/builders/hooks/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/hatchling/builders/hooks/custom.py b/src/hatchling/builders/hooks/custom.py new file mode 100644 index 0000000..1af0993 --- /dev/null +++ b/src/hatchling/builders/hooks/custom.py @@ -0,0 +1,41 @@ +from __future__ import annotations + +import os +from typing import Any + +from hatchling.builders.hooks.plugin.interface import BuildHookInterface +from hatchling.plugin.utils import load_plugin_from_script +from hatchling.utils.constants import DEFAULT_BUILD_SCRIPT + 
+ +class CustomBuildHook: + PLUGIN_NAME = 'custom' + + def __new__( # type: ignore + cls, + root: str, + config: dict[str, Any], + *args: Any, + **kwargs: Any, + ) -> BuildHookInterface: + build_script = config.get('path', DEFAULT_BUILD_SCRIPT) + if not isinstance(build_script, str): + message = f'Option `path` for build hook `{cls.PLUGIN_NAME}` must be a string' + raise TypeError(message) + + if not build_script: + message = f'Option `path` for build hook `{cls.PLUGIN_NAME}` must not be empty if defined' + raise ValueError(message) + + path = os.path.normpath(os.path.join(root, build_script)) + if not os.path.isfile(path): + message = f'Build script does not exist: {build_script}' + raise OSError(message) + + hook_class = load_plugin_from_script(path, build_script, BuildHookInterface, 'build_hook') + hook = hook_class(root, config, *args, **kwargs) + + # Always keep the name to avoid confusion + hook.PLUGIN_NAME = cls.PLUGIN_NAME + + return hook diff --git a/src/hatchling/builders/hooks/plugin/__init__.py b/src/hatchling/builders/hooks/plugin/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/hatchling/builders/hooks/plugin/hooks.py b/src/hatchling/builders/hooks/plugin/hooks.py new file mode 100644 index 0000000..53fb288 --- /dev/null +++ b/src/hatchling/builders/hooks/plugin/hooks.py @@ -0,0 +1,15 @@ +from __future__ import annotations + +import typing + +from hatchling.builders.hooks.custom import CustomBuildHook +from hatchling.builders.hooks.version import VersionBuildHook +from hatchling.plugin import hookimpl + +if typing.TYPE_CHECKING: + from hatchling.builders.hooks.plugin.interface import BuildHookInterface + + +@hookimpl +def hatch_register_build_hook() -> list[type[BuildHookInterface]]: + return [CustomBuildHook, VersionBuildHook] # type: ignore diff --git a/src/hatchling/builders/hooks/plugin/interface.py b/src/hatchling/builders/hooks/plugin/interface.py new file mode 100644 index 0000000..25d4344 --- /dev/null +++ 
class BuildHookInterface(Generic[BuilderConfigBound]):  # no cov
    """
    Example usage:

    ```python tab="plugin.py"
    from hatchling.builders.hooks.plugin.interface import BuildHookInterface


    class SpecialBuildHook(BuildHookInterface):
        PLUGIN_NAME = 'special'
        ...
    ```

    ```python tab="hooks.py"
    from hatchling.plugin import hookimpl

    from .plugin import SpecialBuildHook


    @hookimpl
    def hatch_register_build_hook():
        return SpecialBuildHook
    ```
    """

    PLUGIN_NAME = ''
    """The name used for selection."""

    def __init__(
        self,
        root: str,
        config: dict[str, Any],
        build_config: BuilderConfigBound,
        metadata: ProjectMetadata,
        directory: str,
        target_name: str,
        app: Application | None = None,
    ) -> None:
        # All state is private; read access goes through the properties below.
        self.__root = root
        self.__config = config
        self.__build_config = build_config
        self.__metadata = metadata
        self.__directory = directory
        self.__target_name = target_name
        self.__app = app

    @property
    def root(self) -> str:
        """
        The root of the project tree.
        """
        return self.__root

    @property
    def config(self) -> dict[str, Any]:
        """
        The cumulative hook configuration.

        ```toml config-example
        [tool.hatch.build.hooks.<hook_name>]
        [tool.hatch.build.targets.<target_name>.hooks.<hook_name>]
        ```
        """
        return self.__config

    @property
    def build_config(self) -> BuilderConfigBound:
        """
        An instance of [BuilderConfig](../utilities.md#hatchling.builders.config.BuilderConfig).
        """
        return self.__build_config

    @property
    def directory(self) -> str:
        """
        The build directory.
        """
        return self.__directory

    @property
    def target_name(self) -> str:
        """
        The plugin name of the build target.
        """
        return self.__target_name

    @property
    def metadata(self) -> ProjectMetadata:
        # Undocumented for now
        return self.__metadata

    @property
    def app(self) -> Application:
        """
        An instance of [Application](../utilities.md#hatchling.bridge.app.Application).
        """
        if self.__app is None:
            # Constructed lazily so importing the bridge is avoided unless needed.
            from hatchling.bridge.app import Application

            self.__app = cast(Application, Application().get_safe_application())

        return self.__app

    def clean(self, versions: list[str]) -> None:
        """
        This occurs before the build process if the `-c`/`--clean` flag was passed to
        the [`build`](../../cli/reference.md#hatch-build) command, or when invoking
        the [`clean`](../../cli/reference.md#hatch-clean) command.
        """

    def initialize(self, version: str, build_data: dict[str, Any]) -> None:
        """
        This occurs immediately before each build.

        Any modifications to the build data will be seen by the build target.
        """

    def finalize(self, version: str, build_data: dict[str, Any], artifact_path: str) -> None:
        """
        This occurs immediately after each build and will not run if the `--hooks-only` flag
        was passed to the [`build`](../../cli/reference.md#hatch-build) command.

        The build data will reflect any modifications done by the target during the build.
        """
class VersionBuildHook(BuildHookInterface):
    """Build hook (`version`) that writes the project version to a file during builds."""

    PLUGIN_NAME = 'version'

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)

        # Caches for the lazily validated options below.
        self.__config_path: str | None = None
        self.__config_template: str | None = None
        self.__config_pattern: str | bool | None = None

    @property
    def config_path(self) -> str:
        """The required `path` option: the file that receives the version."""
        if self.__config_path is None:
            raw_path = self.config.get('path', '')
            if not isinstance(raw_path, str):
                message = f'Option `path` for build hook `{self.PLUGIN_NAME}` must be a string'
                raise TypeError(message)

            if not raw_path:
                message = f'Option `path` for build hook `{self.PLUGIN_NAME}` is required'
                raise ValueError(message)

            self.__config_path = raw_path

        return self.__config_path

    @property
    def config_template(self) -> str:
        """The optional `template` option used when (re)creating the file."""
        if self.__config_template is None:
            raw_template = self.config.get('template', '')
            if not isinstance(raw_template, str):
                message = f'Option `template` for build hook `{self.PLUGIN_NAME}` must be a string'
                raise TypeError(message)

            self.__config_template = raw_template

        return self.__config_template

    @property
    def config_pattern(self) -> str | bool:
        """The optional `pattern` option; truthy selects in-place updating."""
        if self.__config_pattern is None:
            raw_pattern = self.config.get('pattern', '')
            if not isinstance(raw_pattern, (str, bool)):
                message = f'Option `pattern` for build hook `{self.PLUGIN_NAME}` must be a string or a boolean'
                raise TypeError(message)

            self.__config_pattern = raw_pattern

        return self.__config_pattern

    def initialize(
        self,
        version: str,  # noqa: ARG002
        build_data: dict[str, Any],
    ) -> None:
        """Write or update the version file and register it as a build artifact."""
        version_file = VersionFile(self.root, self.config_path)
        if self.config_pattern:
            # Update the existing file in place, preserving its other content.
            version_file.read(pattern=self.config_pattern)
            version_file.set_version(self.metadata.version)
        else:
            # Create the file from the (possibly empty) template.
            version_file.write(self.metadata.version, self.config_template)

        build_data['artifacts'].append(f'/{self.config_path}')
hatchling.builders.hooks.plugin.interface import BuildHookInterface + from hatchling.metadata.core import ProjectMetadata + + +class IncludedFile: + __slots__ = ('path', 'relative_path', 'distribution_path') + + def __init__(self, path: str, relative_path: str, distribution_path: str) -> None: + self.path = path + self.relative_path = relative_path + self.distribution_path = distribution_path + + +class BuilderInterface(ABC, Generic[BuilderConfigBound, PluginManagerBound]): + """ + Example usage: + + ```python tab="plugin.py" + from hatchling.builders.plugin.interface import BuilderInterface + + + class SpecialBuilder(BuilderInterface): + PLUGIN_NAME = 'special' + ... + ``` + + ```python tab="hooks.py" + from hatchling.plugin import hookimpl + + from .plugin import SpecialBuilder + + + @hookimpl + def hatch_register_builder(): + return SpecialBuilder + ``` + """ + + PLUGIN_NAME = '' + """The name used for selection.""" + + def __init__( + self, + root: str, + plugin_manager: PluginManagerBound | None = None, + config: dict[str, Any] | None = None, + metadata: ProjectMetadata | None = None, + app: Application | None = None, + ) -> None: + self.__root = root + self.__plugin_manager = cast(PluginManagerBound, plugin_manager) + self.__raw_config = config + self.__metadata = metadata + self.__app = app + self.__config = cast(BuilderConfigBound, None) + self.__project_config: dict[str, Any] | None = None + self.__hatch_config: dict[str, Any] | None = None + self.__build_config: dict[str, Any] | None = None + self.__build_targets: list[str] | None = None + self.__target_config: dict[str, Any] | None = None + + # Metadata + self.__project_id: str | None = None + + def build( + self, + *, + directory: str | None = None, + versions: list[str] | None = None, + hooks_only: bool | None = None, + clean: bool | None = None, + clean_hooks_after: bool | None = None, + clean_only: bool | None = False, + ) -> Generator[str, None, None]: + # Fail early for invalid project metadata + 
self.metadata.validate_fields() + + if directory is None: + directory = ( + self.config.normalize_build_directory(os.environ[BuildEnvVars.LOCATION]) + if BuildEnvVars.LOCATION in os.environ + else self.config.directory + ) + + if not os.path.isdir(directory): + os.makedirs(directory) + + version_api = self.get_version_api() + + versions = versions or self.config.versions + if versions: + unknown_versions = set(versions) - set(version_api) + if unknown_versions: + message = ( + f'Unknown versions for target `{self.PLUGIN_NAME}`: {", ".join(map(str, sorted(unknown_versions)))}' + ) + raise ValueError(message) + + if hooks_only is None: + hooks_only = env_var_enabled(BuildEnvVars.HOOKS_ONLY) + + configured_build_hooks = self.get_build_hooks(directory) + build_hooks = list(configured_build_hooks.values()) + + if clean_only: + clean = True + elif clean is None: + clean = env_var_enabled(BuildEnvVars.CLEAN) + if clean: + if not hooks_only: + self.clean(directory, versions) + + for build_hook in build_hooks: + build_hook.clean(versions) + + if clean_only: + return + + if clean_hooks_after is None: + clean_hooks_after = env_var_enabled(BuildEnvVars.CLEAN_HOOKS_AFTER) + + for version in versions: + self.app.display_debug(f'Building `{self.PLUGIN_NAME}` version `{version}`') + + build_data = self.get_default_build_data() + self.set_build_data_defaults(build_data) + + # Allow inspection of configured build hooks and the order in which they run + build_data['build_hooks'] = tuple(configured_build_hooks) + + # Execute all `initialize` build hooks + for build_hook in build_hooks: + build_hook.initialize(version, build_data) + + if hooks_only: + self.app.display_debug(f'Only ran build hooks for `{self.PLUGIN_NAME}` version `{version}`') + continue + + # Build the artifact + with self.config.set_build_data(build_data): + artifact = version_api[version](directory, **build_data) + + # Execute all `finalize` build hooks + for build_hook in build_hooks: + build_hook.finalize(version, 
build_data, artifact) + + if clean_hooks_after: + for build_hook in build_hooks: + build_hook.clean([version]) + + yield artifact + + def recurse_included_files(self) -> Iterable[IncludedFile]: + """ + Returns a consistently generated series of file objects for every file that should be distributed. Each file + object has three `str` attributes: + + - `path` - the absolute path + - `relative_path` - the path relative to the project root; will be an empty string for external files + - `distribution_path` - the path to be distributed as + """ + yield from self.recurse_selected_project_files() + yield from self.recurse_forced_files(self.config.get_force_include()) + + def recurse_selected_project_files(self) -> Iterable[IncludedFile]: + if self.config.only_include: + yield from self.recurse_explicit_files(self.config.only_include) + else: + yield from self.recurse_project_files() + + def recurse_project_files(self) -> Iterable[IncludedFile]: + for root, dirs, files in safe_walk(self.root): + relative_path = get_relative_path(root, self.root) + + dirs[:] = sorted(d for d in dirs if not self.config.directory_is_excluded(d, relative_path)) + + files.sort() + is_package = '__init__.py' in files + for f in files: + relative_file_path = os.path.join(relative_path, f) + distribution_path = self.config.get_distribution_path(relative_file_path) + if self.config.path_is_reserved(distribution_path): + continue + + if self.config.include_path(relative_file_path, is_package=is_package): + yield IncludedFile( + os.path.join(root, f), relative_file_path, self.config.get_distribution_path(relative_file_path) + ) + + def recurse_forced_files(self, inclusion_map: dict[str, str]) -> Iterable[IncludedFile]: + for source, target_path in inclusion_map.items(): + external = not source.startswith(self.root) + if os.path.isfile(source): + yield IncludedFile( + source, + '' if external else os.path.relpath(source, self.root), + self.config.get_distribution_path(target_path), + ) + elif 
os.path.isdir(source): + for root, dirs, files in safe_walk(source): + relative_directory = get_relative_path(root, source) + + dirs[:] = sorted(d for d in dirs if d not in EXCLUDED_DIRECTORIES) + + files.sort() + for f in files: + relative_file_path = os.path.join(target_path, relative_directory, f) + distribution_path = self.config.get_distribution_path(relative_file_path) + if not self.config.path_is_reserved(distribution_path): + yield IncludedFile( + os.path.join(root, f), + '' if external else relative_file_path, + distribution_path, + ) + else: + msg = f'Forced include not found: {source}' + raise FileNotFoundError(msg) + + def recurse_explicit_files(self, inclusion_map: dict[str, str]) -> Iterable[IncludedFile]: + for source, target_path in inclusion_map.items(): + external = not source.startswith(self.root) + if os.path.isfile(source): + distribution_path = self.config.get_distribution_path(target_path) + if not self.config.path_is_reserved(distribution_path): + yield IncludedFile( + source, + '' if external else os.path.relpath(source, self.root), + self.config.get_distribution_path(target_path), + ) + elif os.path.isdir(source): + for root, dirs, files in safe_walk(source): + relative_directory = get_relative_path(root, source) + + dirs[:] = sorted(d for d in dirs if d not in EXCLUDED_DIRECTORIES) + + files.sort() + is_package = '__init__.py' in files + for f in files: + relative_file_path = os.path.join(target_path, relative_directory, f) + distribution_path = self.config.get_distribution_path(relative_file_path) + if self.config.path_is_reserved(distribution_path): + continue + + if self.config.include_path(relative_file_path, explicit=True, is_package=is_package): + yield IncludedFile( + os.path.join(root, f), '' if external else relative_file_path, distribution_path + ) + + @property + def root(self) -> str: + """ + The root of the project tree. 
+ """ + return self.__root + + @property + def plugin_manager(self) -> PluginManagerBound: + if self.__plugin_manager is None: + from hatchling.plugin.manager import PluginManager + + self.__plugin_manager = PluginManager() + + return self.__plugin_manager + + @property + def metadata(self) -> ProjectMetadata: + if self.__metadata is None: + from hatchling.metadata.core import ProjectMetadata + + self.__metadata = ProjectMetadata(self.root, self.plugin_manager, self.__raw_config) + + return self.__metadata + + @property + def app(self) -> Application: + """ + An instance of [Application](../utilities.md#hatchling.bridge.app.Application). + """ + if self.__app is None: + from hatchling.bridge.app import Application + + self.__app = cast(Application, Application().get_safe_application()) + + return self.__app + + @property + def raw_config(self) -> dict[str, Any]: + if self.__raw_config is None: + self.__raw_config = self.metadata.config + + return self.__raw_config + + @property + def project_config(self) -> dict[str, Any]: + if self.__project_config is None: + self.__project_config = self.metadata.core.config + + return self.__project_config + + @property + def hatch_config(self) -> dict[str, Any]: + if self.__hatch_config is None: + self.__hatch_config = self.metadata.hatch.config + + return self.__hatch_config + + @property + def config(self) -> BuilderConfigBound: + """ + An instance of [BuilderConfig](../utilities.md#hatchling.builders.config.BuilderConfig). 
+ """ + if self.__config is None: + self.__config = self.get_config_class()( + self, self.root, self.PLUGIN_NAME, self.build_config, self.target_config + ) + + return self.__config + + @property + def build_config(self) -> dict[str, Any]: + """ + ```toml config-example + [tool.hatch.build] + ``` + """ + if self.__build_config is None: + self.__build_config = self.metadata.hatch.build_config + + return self.__build_config + + @property + def target_config(self) -> dict[str, Any]: + """ + ```toml config-example + [tool.hatch.build.targets.] + ``` + """ + if self.__target_config is None: + target_config: dict[str, Any] = self.metadata.hatch.build_targets.get(self.PLUGIN_NAME, {}) + if not isinstance(target_config, dict): + message = f'Field `tool.hatch.build.targets.{self.PLUGIN_NAME}` must be a table' + raise TypeError(message) + + self.__target_config = target_config + + return self.__target_config + + @property + def project_id(self) -> str: + if self.__project_id is None: + self.__project_id = f'{self.normalize_file_name_component(self.metadata.core.name)}-{self.metadata.version}' + + return self.__project_id + + def get_build_hooks(self, directory: str) -> dict[str, BuildHookInterface]: + configured_build_hooks = {} + for hook_name, config in self.config.hook_config.items(): + build_hook = self.plugin_manager.build_hook.get(hook_name) + if build_hook is None: + from hatchling.plugin.exceptions import UnknownPluginError + + message = f'Unknown build hook: {hook_name}' + raise UnknownPluginError(message) + + configured_build_hooks[hook_name] = build_hook( + self.root, config, self.config, self.metadata, directory, self.PLUGIN_NAME, self.app + ) + + return configured_build_hooks + + @abstractmethod + def get_version_api(self) -> dict[str, Callable]: + """ + A mapping of `str` versions to a callable that is used for building. 
+ Each callable must have the following signature: + + ```python + def ...(build_dir: str, build_data: dict) -> str: + ``` + + The return value must be the absolute path to the built artifact. + """ + + def get_default_versions(self) -> list[str]: + """ + A list of versions to build when users do not specify any, defaulting to all versions. + """ + return list(self.get_version_api()) + + def get_default_build_data(self) -> dict[str, Any]: # noqa: PLR6301 + """ + A mapping that can be modified by [build hooks](../build-hook/reference.md) to influence the behavior of builds. + """ + return {} + + def set_build_data_defaults(self, build_data: dict[str, Any]) -> None: # noqa: PLR6301 + build_data.setdefault('artifacts', []) + build_data.setdefault('force_include', {}) + + def clean(self, directory: str, versions: list[str]) -> None: + """ + Called before builds if the `-c`/`--clean` flag was passed to the + [`build`](../../cli/reference.md#hatch-build) command. + """ + + @classmethod + def get_config_class(cls) -> type[BuilderConfig]: + """ + Must return a subclass of [BuilderConfig](../utilities.md#hatchling.builders.config.BuilderConfig). 
+ """ + return BuilderConfig + + @staticmethod + def normalize_file_name_component(file_name: str) -> str: + """ + https://peps.python.org/pep-0427/#escaping-and-unicode + """ + return re.sub(r'[^\w\d.]+', '_', file_name, flags=re.UNICODE) diff --git a/src/hatchling/builders/sdist.py b/src/hatchling/builders/sdist.py new file mode 100644 index 0000000..281b42a --- /dev/null +++ b/src/hatchling/builders/sdist.py @@ -0,0 +1,352 @@ +from __future__ import annotations + +import gzip +import os +import tarfile +import tempfile +from contextlib import closing +from copy import copy +from io import BytesIO +from time import time as get_current_timestamp +from typing import TYPE_CHECKING, Any, Callable + +from hatchling.builders.config import BuilderConfig +from hatchling.builders.plugin.interface import BuilderInterface +from hatchling.builders.utils import ( + get_reproducible_timestamp, + normalize_archive_path, + normalize_file_permissions, + normalize_relative_path, + replace_file, +) +from hatchling.metadata.spec import DEFAULT_METADATA_VERSION, get_core_metadata_constructors +from hatchling.utils.constants import DEFAULT_BUILD_SCRIPT, DEFAULT_CONFIG_FILE + +if TYPE_CHECKING: + from types import TracebackType + + +class SdistArchive: + def __init__(self, name: str, *, reproducible: bool) -> None: + """ + https://peps.python.org/pep-0517/#source-distributions + """ + self.name = name + self.reproducible = reproducible + self.timestamp: int | None = get_reproducible_timestamp() if reproducible else None + + raw_fd, self.path = tempfile.mkstemp(suffix='.tar.gz') + self.fd = os.fdopen(raw_fd, 'w+b') + self.gz = gzip.GzipFile(fileobj=self.fd, mode='wb', mtime=self.timestamp) + self.tf = tarfile.TarFile(fileobj=self.gz, mode='w', format=tarfile.PAX_FORMAT) + self.gettarinfo = lambda *args, **kwargs: self.normalize_tar_metadata(self.tf.gettarinfo(*args, **kwargs)) + + def create_file(self, contents: str | bytes, *relative_paths: str) -> None: + if not isinstance(contents, 
bytes): + contents = contents.encode('utf-8') + tar_info = tarfile.TarInfo(normalize_archive_path(os.path.join(self.name, *relative_paths))) + tar_info.size = len(contents) + if self.reproducible and self.timestamp is not None: + tar_info.mtime = self.timestamp + else: + tar_info.mtime = int(get_current_timestamp()) + + with closing(BytesIO(contents)) as buffer: + self.tf.addfile(tar_info, buffer) + + def normalize_tar_metadata(self, tar_info: tarfile.TarInfo) -> tarfile.TarInfo: + if not self.reproducible: + return tar_info + + tar_info = copy(tar_info) + tar_info.uid = 0 + tar_info.gid = 0 + tar_info.uname = '' + tar_info.gname = '' + tar_info.mode = normalize_file_permissions(tar_info.mode) + if self.timestamp is not None: + tar_info.mtime = self.timestamp + + return tar_info + + def __getattr__(self, name: str) -> Any: + attr = getattr(self.tf, name) + setattr(self, name, attr) + return attr + + def __enter__(self) -> SdistArchive: # noqa: PYI034 + return self + + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: + self.tf.close() + self.gz.close() + self.fd.close() + + +class SdistBuilderConfig(BuilderConfig): + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + self.__core_metadata_constructor: Callable[..., str] | None = None + self.__strict_naming: bool | None = None + self.__support_legacy: bool | None = None + + @property + def core_metadata_constructor(self) -> Callable[..., str]: + if self.__core_metadata_constructor is None: + core_metadata_version = self.target_config.get('core-metadata-version', DEFAULT_METADATA_VERSION) + if not isinstance(core_metadata_version, str): + message = f'Field `tool.hatch.build.targets.{self.plugin_name}.core-metadata-version` must be a string' + raise TypeError(message) + + constructors = get_core_metadata_constructors() + if core_metadata_version not in constructors: + message = ( + 
# --- src/hatchling/builders/sdist.py (continued) ---
# SdistBuilderConfig (continued). The original class statement appears above
# this chunk; it is repeated here only so this flattened section stays
# parseable. NOTE(review): base class assumed to be BuilderConfig (mirrors
# WheelBuilderConfig and `get_config_class` below) — confirm against the
# lines above this chunk.
class SdistBuilderConfig(BuilderConfig):
    @property
    def core_metadata_constructor(self) -> Callable[..., str]:
        """Return the PKG-INFO constructor for the configured metadata version.

        Reads `tool.hatch.build.targets.sdist.core-metadata-version`, validates
        it against the known constructors, and caches the lookup.

        NOTE(review): the opening lines of this property were outside the
        visible chunk; they are reconstructed from the byte-identical wheel
        counterpart later in this patch — confirm against the original.
        """
        if self.__core_metadata_constructor is None:
            core_metadata_version = self.target_config.get('core-metadata-version', DEFAULT_METADATA_VERSION)
            if not isinstance(core_metadata_version, str):
                message = f'Field `tool.hatch.build.targets.{self.plugin_name}.core-metadata-version` must be a string'
                raise TypeError(message)

            constructors = get_core_metadata_constructors()
            if core_metadata_version not in constructors:
                message = (
                    f'Unknown metadata version `{core_metadata_version}` for field '
                    f'`tool.hatch.build.targets.{self.plugin_name}.core-metadata-version`. '
                    f'Available: {", ".join(sorted(constructors))}'
                )
                raise ValueError(message)

            self.__core_metadata_constructor = constructors[core_metadata_version]

        return self.__core_metadata_constructor

    @property
    def strict_naming(self) -> bool:
        """Whether artifact file names must use the normalized project name.

        Target-level config takes precedence over build-level config; defaults
        to True. Raises TypeError when the configured value is not a boolean.
        """
        if self.__strict_naming is None:
            if 'strict-naming' in self.target_config:
                strict_naming = self.target_config['strict-naming']
                if not isinstance(strict_naming, bool):
                    message = f'Field `tool.hatch.build.targets.{self.plugin_name}.strict-naming` must be a boolean'
                    raise TypeError(message)
            else:
                strict_naming = self.build_config.get('strict-naming', True)
                if not isinstance(strict_naming, bool):
                    message = 'Field `tool.hatch.build.strict-naming` must be a boolean'
                    raise TypeError(message)

            self.__strict_naming = strict_naming

        return self.__strict_naming

    @property
    def support_legacy(self) -> bool:
        """Whether to also generate a legacy `setup.py` inside the sdist."""
        if self.__support_legacy is None:
            self.__support_legacy = bool(self.target_config.get('support-legacy', False))

        return self.__support_legacy


class SdistBuilder(BuilderInterface):
    """
    Build an archive of the source files
    """

    PLUGIN_NAME = 'sdist'

    def get_version_api(self) -> dict[str, Callable]:
        """Map version identifiers to their build methods."""
        return {'standard': self.build_standard}

    def get_default_versions(self) -> list[str]:  # noqa: PLR6301
        return ['standard']

    def clean(  # noqa: PLR6301
        self,
        directory: str,
        versions: list[str],  # noqa: ARG002
    ) -> None:
        """Remove previously built sdists from `directory`."""
        for filename in os.listdir(directory):
            if filename.endswith('.tar.gz'):
                os.remove(os.path.join(directory, filename))

    def build_standard(self, directory: str, **build_data: Any) -> str:
        """Build a `.tar.gz` sdist in `directory` and return its path.

        When `support-legacy` is enabled, packages are detected via their
        `__init__.py` files so an equivalent `setup.py` can be generated.
        """
        found_packages = set()

        with SdistArchive(self.artifact_project_id, reproducible=self.config.reproducible) as archive:
            for included_file in self.recurse_included_files():
                if self.config.support_legacy:
                    possible_package, file_name = os.path.split(included_file.relative_path)
                    if file_name == '__init__.py':
                        found_packages.add(possible_package)

                tar_info = archive.gettarinfo(
                    included_file.path,
                    arcname=normalize_archive_path(
                        os.path.join(self.artifact_project_id, included_file.distribution_path)
                    ),
                )

                if tar_info.isfile():
                    with open(included_file.path, 'rb') as f:
                        archive.addfile(tar_info, f)
                else:  # no cov
                    # TODO: Investigate if this is necessary (for symlinks, etc.)
                    archive.addfile(tar_info)

            archive.create_file(
                self.config.core_metadata_constructor(self.metadata, extra_dependencies=build_data['dependencies']),
                'PKG-INFO',
            )

            if self.config.support_legacy:
                archive.create_file(
                    self.construct_setup_py_file(sorted(found_packages), extra_dependencies=build_data['dependencies']),
                    'setup.py',
                )

        target = os.path.join(directory, f'{self.artifact_project_id}.tar.gz')

        replace_file(archive.path, target)
        return target

    @property
    def artifact_project_id(self) -> str:
        """Project ID used in the artifact file name; raw name when strict naming is off."""
        return (
            self.project_id
            if self.config.strict_naming
            else f'{self.normalize_file_name_component(self.metadata.core.raw_name)}-{self.metadata.version}'
        )

    # FIX: the annotation was `tuple[()]`, which is the *empty tuple* type and
    # wrong for a sequence of requirement strings; `tuple[str, ...]` keeps the
    # `()` default and is what callers actually pass.
    def construct_setup_py_file(self, packages: list[str], extra_dependencies: tuple[str, ...] = ()) -> str:
        """Render a legacy `setup.py` equivalent to the project metadata.

        Single quotes inside requirement specifiers are rewritten to double
        quotes so the `!r` repr stays unambiguous inside the generated file.
        """
        contents = '# -*- coding: utf-8 -*-\nfrom setuptools import setup\n\n'

        contents += 'setup(\n'

        contents += f'    name={self.metadata.core.name!r},\n'
        contents += f'    version={self.metadata.version!r},\n'

        if self.metadata.core.description:
            contents += f'    description={self.metadata.core.description!r},\n'

        if self.metadata.core.readme:
            contents += f'    long_description={self.metadata.core.readme!r},\n'

        authors_data = self.metadata.core.authors_data
        if authors_data['name']:
            contents += f"    author={', '.join(authors_data['name'])!r},\n"
        if authors_data['email']:
            contents += f"    author_email={', '.join(authors_data['email'])!r},\n"

        maintainers_data = self.metadata.core.maintainers_data
        if maintainers_data['name']:
            contents += f"    maintainer={', '.join(maintainers_data['name'])!r},\n"
        if maintainers_data['email']:
            contents += f"    maintainer_email={', '.join(maintainers_data['email'])!r},\n"

        if self.metadata.core.classifiers:
            contents += '    classifiers=[\n'

            for classifier in self.metadata.core.classifiers:
                contents += f'        {classifier!r},\n'

            contents += '    ],\n'

        dependencies = list(self.metadata.core.dependencies)
        dependencies.extend(extra_dependencies)
        if dependencies:
            contents += '    install_requires=[\n'

            for raw_specifier in dependencies:
                specifier = raw_specifier.replace("'", '"')
                contents += f'        {specifier!r},\n'

            contents += '    ],\n'

        if self.metadata.core.optional_dependencies:
            contents += '    extras_require={\n'

            for option, specifiers in self.metadata.core.optional_dependencies.items():
                if not specifiers:
                    continue

                contents += f'        {option!r}: [\n'

                for raw_specifier in specifiers:
                    specifier = raw_specifier.replace("'", '"')
                    contents += f'            {specifier!r},\n'

                contents += '        ],\n'

            contents += '    },\n'

        if self.metadata.core.scripts or self.metadata.core.gui_scripts or self.metadata.core.entry_points:
            contents += '    entry_points={\n'

            if self.metadata.core.scripts:
                contents += "        'console_scripts': [\n"

                for name, object_ref in self.metadata.core.scripts.items():
                    contents += f"            '{name} = {object_ref}',\n"

                contents += '        ],\n'

            if self.metadata.core.gui_scripts:
                contents += "        'gui_scripts': [\n"

                for name, object_ref in self.metadata.core.gui_scripts.items():
                    contents += f"            '{name} = {object_ref}',\n"

                contents += '        ],\n'

            if self.metadata.core.entry_points:
                for group, entry_points in self.metadata.core.entry_points.items():
                    contents += f'        {group!r}: [\n'

                    for name, object_ref in entry_points.items():
                        contents += f"            '{name} = {object_ref}',\n"

                    contents += '        ],\n'

            contents += '    },\n'

        if packages:
            src_layout = False
            contents += '    packages=[\n'

            for package in packages:
                if package.startswith(f'src{os.sep}'):
                    src_layout = True
                    contents += f"        {package.replace(os.sep, '.')[4:]!r},\n"
                else:
                    contents += f"        {package.replace(os.sep, '.')!r},\n"

            contents += '    ],\n'

            if src_layout:
                contents += "    package_dir={'': 'src'},\n"

        contents += ')\n'

        return contents

    def get_default_build_data(self) -> dict[str, Any]:
        """Force-include project config files, VCS exclusion files, readme and licenses."""
        force_include = {}
        for filename in ['pyproject.toml', DEFAULT_CONFIG_FILE, DEFAULT_BUILD_SCRIPT]:
            path = os.path.join(self.root, filename)
            if os.path.exists(path):
                force_include[path] = filename

        build_data = {'force_include': force_include, 'dependencies': []}

        for exclusion_files in self.config.vcs_exclusion_files.values():
            for exclusion_file in exclusion_files:
                force_include[exclusion_file] = os.path.basename(exclusion_file)

        readme_path = self.metadata.core.readme_path
        if readme_path:
            readme_path = normalize_relative_path(readme_path)
            force_include[os.path.join(self.root, readme_path)] = readme_path

        license_files = self.metadata.core.license_files
        if license_files:
            for license_file in license_files:
                relative_path = normalize_relative_path(license_file)
                force_include[os.path.join(self.root, relative_path)] = relative_path

        return build_data

    @classmethod
    def get_config_class(cls) -> type[SdistBuilderConfig]:
        return SdistBuilderConfig


# --- new file: src/hatchling/builders/utils.py ---
# Original file header: `from __future__ import annotations` (rendered as a
# comment here because this flattened patch section is not a real module top).
import os
import shutil
from base64 import urlsafe_b64encode
from typing import TYPE_CHECKING, Iterable

if TYPE_CHECKING:
    from zipfile import ZipInfo


def replace_file(src: str, dst: str) -> None:
    """Atomically move `src` over `dst`, falling back to copy + delete.

    `os.replace` raises OSError when src and dst live on different
    filesystems (e.g. /tmp, or container layering); `copy2` preserves
    file metadata in the fallback path.
    """
    try:
        os.replace(src, dst)
    # Happens when on different filesystems like /tmp or caused by layering in containers
    except OSError:
        shutil.copy2(src, dst)
        os.remove(src)
# --- src/hatchling/builders/utils.py (continued) ---


def safe_walk(path: str) -> Iterable[tuple[str, list[str], list[str]]]:
    """Walk `path` following symlinks while refusing to revisit a directory.

    Each directory is identified by its (st_dev, st_ino) pair; a repeat
    indicates a symlink cycle, so its subtree is pruned instead of recursed.
    """
    visited: set[tuple[int, int]] = set()
    for current_root, child_dirs, child_files in os.walk(path, followlinks=True):
        info = os.stat(current_root)
        key = (info.st_dev, info.st_ino)
        if key in visited:
            # Emptying the dir list in place stops os.walk from descending.
            del child_dirs[:]
            continue

        visited.add(key)
        yield current_root, child_dirs, child_files


def get_known_python_major_versions() -> map:
    """Return an iterator over the known Python major versions as strings."""
    return map(str, (2, 3))


def get_relative_path(path: str, start: str) -> str:
    """Like `os.path.relpath` but yields '' (not '.') when the paths match."""
    rel = os.path.relpath(path, start)
    # The first iteration of `os.walk` yields the start directory itself.
    return '' if rel == '.' else rel


def normalize_relative_path(path: str) -> str:
    """Normalize `path` and strip any leading/trailing separators."""
    return os.path.normpath(path).strip(os.sep)


def normalize_relative_directory(path: str) -> str:
    """Normalize `path` as a directory, i.e. with exactly one trailing separator."""
    return f'{normalize_relative_path(path)}{os.sep}'


def normalize_inclusion_map(inclusion_map: dict[str, str], root: str) -> dict[str, str]:
    """Resolve inclusion-map sources to absolute paths and sort the result.

    Sources are user-expanded and made absolute relative to `root`; targets
    are normalized relative paths. Entries are ordered by target depth, then
    target, then source, so shallower destinations come first.
    """
    resolved: dict[str, str] = {}
    for raw_source, raw_target in inclusion_map.items():
        source = os.path.expanduser(os.path.normpath(raw_source))
        if not os.path.isabs(source):
            source = os.path.abspath(os.path.join(root, source))

        resolved[source] = normalize_relative_path(raw_target)

    def ordering(entry: tuple[str, str]) -> tuple[int, str, str]:
        source, target = entry
        return target.count(os.sep), target, source

    return dict(sorted(resolved.items(), key=ordering))


def normalize_archive_path(path: str) -> str:
    """Convert an OS path to the forward-slash form used inside archives."""
    return path if os.sep == '/' else path.replace(os.sep, '/')


def format_file_hash(digest: bytes) -> str:
    # https://peps.python.org/pep-0427/#signed-wheel-files
    return urlsafe_b64encode(digest).decode('ascii').rstrip('=')


def get_reproducible_timestamp() -> int:
    """
    Returns an `int` derived from the `SOURCE_DATE_EPOCH` environment variable; see
    https://reproducible-builds.org/specs/source-date-epoch/.

    The default value will always be: `1580601600`
    """
    return int(os.environ.get('SOURCE_DATE_EPOCH', '1580601600'))
def normalize_file_permissions(st_mode: int) -> int:
    """
    https://github.com/takluyver/flit/blob/6a2a8c6462e49f584941c667b70a6f48a7b3f9ab/flit_core/flit_core/common.py#L257

    Normalize the permission bits in the st_mode field from stat to 644/755.

    Popular VCSs only track whether a file is executable or not. The exact
    permissions can vary on systems with different umasks. Normalizing
    to 644 (non executable) or 755 (executable) makes builds more reproducible.
    """
    # Force at least rw-r--r--, then clear group/other write and stray exec bits;
    # the file-type bits above the permission triplets are left untouched.
    normalized = (st_mode | 0o644) & ~0o133
    if st_mode & 0o100:  # no cov
        # Owner-executable input: promote 644 -> 755.
        normalized |= 0o111
    return normalized


def set_zip_info_mode(zip_info: ZipInfo, mode: int = 0o644) -> None:
    """
    https://github.com/takluyver/flit/commit/3889583719888aef9f28baaa010e698cb7884904
    """
    # Unix permissions live in the top 16 bits of the external attributes.
    zip_info.external_attr = mode << 16


# --- new file: src/hatchling/builders/wheel.py ---
# Original file header: `from __future__ import annotations` (rendered as a
# comment because this flattened patch section is not a real module top).
import csv
import hashlib
import os
import stat
import sys
import tempfile
import zipfile
from functools import cached_property
from io import StringIO
from typing import TYPE_CHECKING, Any, Callable, Iterable, NamedTuple, Sequence, Tuple, cast

from hatchling.__about__ import __version__
from hatchling.builders.config import BuilderConfig
from hatchling.builders.plugin.interface import BuilderInterface
from hatchling.builders.utils import (
    format_file_hash,
    get_known_python_major_versions,
    get_reproducible_timestamp,
    normalize_archive_path,
    normalize_file_permissions,
    normalize_inclusion_map,
    replace_file,
    set_zip_info_mode,
)
from hatchling.metadata.spec import DEFAULT_METADATA_VERSION, get_core_metadata_constructors
# --- src/hatchling/builders/wheel.py (continued) ---

if TYPE_CHECKING:
    from types import TracebackType

    from hatchling.builders.plugin.interface import IncludedFile


EDITABLES_MINIMUM_VERSION = '0.3'

# Zip timestamp shape: (year, month, day, hour, minute, second).
TIME_TUPLE = Tuple[int, int, int, int, int, int]


class FileSelectionOptions(NamedTuple):
    """Resolved file-selection configuration for the wheel target."""

    include: list[str]
    exclude: list[str]
    packages: list[str]
    only_include: list[str]


class RecordFile:
    """In-memory builder for a wheel RECORD file (CSV rows of path, hash, size)."""

    def __init__(self) -> None:
        self.__buffer = StringIO()
        self.__writer = csv.writer(self.__buffer, delimiter=',', quotechar='"', lineterminator='\n')

    def write(self, record: Iterable[Any]) -> None:
        """Append one RECORD row."""
        self.__writer.writerow(record)

    def construct(self) -> str:
        """Return the accumulated RECORD contents."""
        return self.__buffer.getvalue()

    def __enter__(self) -> RecordFile:  # noqa: PYI034
        return self

    def __exit__(
        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
    ) -> None:
        self.__buffer.close()


class WheelArchive:
    def __init__(self, project_id: str, *, reproducible: bool) -> None:
        """
        https://peps.python.org/pep-0427/#abstract
        """
        self.metadata_directory = f'{project_id}.dist-info'
        self.shared_data_directory = f'{project_id}.data'

        self.reproducible = reproducible
        # A fixed SOURCE_DATE_EPOCH-derived timestamp when reproducible,
        # otherwise None so real file mtimes are used.
        self.time_tuple: TIME_TUPLE | None = self.get_reproducible_time_tuple() if reproducible else None

        raw_fd, self.path = tempfile.mkstemp(suffix='.whl')
        self.fd = os.fdopen(raw_fd, 'w+b')
        self.zf = zipfile.ZipFile(self.fd, 'w', compression=zipfile.ZIP_DEFLATED)

    @staticmethod
    def get_reproducible_time_tuple() -> TIME_TUPLE:
        """Convert the reproducible epoch to a UTC zip time tuple."""
        from datetime import datetime, timezone

        moment = datetime.fromtimestamp(get_reproducible_timestamp(), timezone.utc)
        return moment.year, moment.month, moment.day, moment.hour, moment.minute, moment.second

    def add_file(self, included_file: IncludedFile) -> tuple[str, str, str]:
        """Stream a project file into the archive; return its RECORD row."""
        arc_path = normalize_archive_path(included_file.distribution_path)
        file_stat = os.stat(included_file.path)

        if self.reproducible:
            zip_info = zipfile.ZipInfo(arc_path, cast(TIME_TUPLE, self.time_tuple))

            # https://github.com/takluyver/flit/pull/66
            new_mode = normalize_file_permissions(file_stat.st_mode)
            set_zip_info_mode(zip_info, new_mode & 0xFFFF)
            if stat.S_ISDIR(file_stat.st_mode):  # no cov
                zip_info.external_attr |= 0x10
        else:
            zip_info = zipfile.ZipInfo.from_file(included_file.path, arc_path)

        zip_info.compress_type = zipfile.ZIP_DEFLATED

        digest = hashlib.sha256()
        with open(included_file.path, 'rb') as src, self.zf.open(zip_info, 'w') as dst:
            while chunk := src.read(16384):
                digest.update(chunk)
                dst.write(chunk)

        return arc_path, f'sha256={format_file_hash(digest.digest())}', str(file_stat.st_size)

    def write_metadata(self, relative_path: str, contents: str | bytes) -> tuple[str, str, str]:
        """Write a generated file under the `.dist-info` directory."""
        return self.write_file(f'{self.metadata_directory}/{normalize_archive_path(relative_path)}', contents)

    def add_shared_file(self, shared_file: IncludedFile) -> tuple[str, str, str]:
        shared_file.distribution_path = f'{self.shared_data_directory}/data/{shared_file.distribution_path}'
        return self.add_file(shared_file)

    def add_extra_metadata_file(self, extra_metadata_file: IncludedFile) -> tuple[str, str, str]:
        extra_metadata_file.distribution_path = (
            f'{self.metadata_directory}/extra_metadata/{extra_metadata_file.distribution_path}'
        )
        return self.add_file(extra_metadata_file)

    def write_file(self, relative_path: str, contents: str | bytes) -> tuple[str, str, str]:
        """Write in-memory contents into the archive; return its RECORD row."""
        data = contents if isinstance(contents, bytes) else contents.encode('utf-8')

        # Generated files get a fixed dummy timestamp in non-reproducible mode
        # so their entries do not depend on the build wall clock.
        zip_info = zipfile.ZipInfo(relative_path, self.time_tuple or (2020, 2, 2, 0, 0, 0))
        set_zip_info_mode(zip_info)

        hash_digest = format_file_hash(hashlib.sha256(data).digest())
        self.zf.writestr(zip_info, data, compress_type=zipfile.ZIP_DEFLATED)

        return relative_path, f'sha256={hash_digest}', str(len(data))

    def __enter__(self) -> WheelArchive:  # noqa: PYI034
        return self

    def __exit__(
        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
    ) -> None:
        self.zf.close()
        self.fd.close()
class WheelBuilderConfig(BuilderConfig):
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)

        # Lazily computed caches for the properties below.
        self.__core_metadata_constructor: Callable[..., str] | None = None
        self.__shared_data: dict[str, str] | None = None
        self.__extra_metadata: dict[str, str] | None = None
        self.__strict_naming: bool | None = None
        self.__macos_max_compat: bool | None = None

    @cached_property
    def default_file_selection_options(self) -> FileSelectionOptions:
        """Resolve what to ship, falling back to name-based heuristics.

        Explicit `include`/`packages`/`only-include` config wins; otherwise
        the project name is probed as a flat package, an src-layout package,
        a single module, then a single namespace package. If nothing matches
        and selection cannot be bypassed, an explanatory error is raised.
        """
        from glob import glob

        include = self.target_config.get('include', self.build_config.get('include', []))
        exclude = self.target_config.get('exclude', self.build_config.get('exclude', []))
        packages = self.target_config.get('packages', self.build_config.get('packages', []))
        only_include = self.target_config.get('only-include', self.build_config.get('only-include', []))

        if include or packages or only_include:
            return FileSelectionOptions(include, exclude, packages, only_include)

        for candidate in (
            self.builder.normalize_file_name_component(self.builder.metadata.core.raw_name),
            self.builder.normalize_file_name_component(self.builder.metadata.core.name),
        ):
            # Flat layout: ./<name>/__init__.py
            if os.path.isfile(os.path.join(self.root, candidate, '__init__.py')):
                normalized_project_name = self.get_raw_fs_path_name(self.root, candidate)
                return FileSelectionOptions([], exclude, [normalized_project_name], [])

            # src layout: ./src/<name>/__init__.py
            if os.path.isfile(os.path.join(self.root, 'src', candidate, '__init__.py')):
                normalized_project_name = self.get_raw_fs_path_name(os.path.join(self.root, 'src'), candidate)
                return FileSelectionOptions([], exclude, [f'src/{normalized_project_name}'], [])

            # Single module: ./<name>.py
            module_file = f'{candidate}.py'
            if os.path.isfile(os.path.join(self.root, module_file)):
                normalized_project_name = self.get_raw_fs_path_name(self.root, module_file)
                # NOTE(review): `normalized_project_name` is computed but the
                # original returns the raw `module_file` — possibly intended to
                # use the normalized name; behavior kept as-is.
                return FileSelectionOptions([], exclude, [], [module_file])

            # Namespace package: ./<ns>/<name>/__init__.py (exactly one match)
            possible_namespace_packages = glob(os.path.join(self.root, '*', candidate, '__init__.py'))
            if len(possible_namespace_packages) == 1:
                relative_path = os.path.relpath(possible_namespace_packages[0], self.root)
                namespace = relative_path.split(os.sep)[0]
                return FileSelectionOptions([], exclude, [namespace], [])

        if self.bypass_selection or self.build_artifact_spec is not None or self.get_force_include():
            self.set_exclude_all()
            return FileSelectionOptions([], exclude, [], [])

        message = (
            'Unable to determine which files to ship inside the wheel using the following heuristics: '
            'https://hatch.pypa.io/latest/plugins/builder/wheel/#default-file-selection\n\nAt least one '
            'file selection option must be defined in the `tool.hatch.build.targets.wheel` table, see: '
            'https://hatch.pypa.io/latest/config/build/\n\nAs an example, if you intend to ship a '
            'directory named `foo` that resides within a `src` directory located at the root of your '
            'project, you can define the following:\n\n[tool.hatch.build.targets.wheel]\n'
            'packages = ["src/foo"]'
        )
        raise ValueError(message)

    def default_include(self) -> list[str]:
        return self.default_file_selection_options.include

    def default_exclude(self) -> list[str]:
        return self.default_file_selection_options.exclude

    def default_packages(self) -> list[str]:
        return self.default_file_selection_options.packages

    def default_only_include(self) -> list[str]:
        return self.default_file_selection_options.only_include

    @property
    def core_metadata_constructor(self) -> Callable[..., str]:
        """Return the METADATA constructor for the configured metadata version."""
        if self.__core_metadata_constructor is None:
            core_metadata_version = self.target_config.get('core-metadata-version', DEFAULT_METADATA_VERSION)
            if not isinstance(core_metadata_version, str):
                message = f'Field `tool.hatch.build.targets.{self.plugin_name}.core-metadata-version` must be a string'
                raise TypeError(message)

            constructors = get_core_metadata_constructors()
            if core_metadata_version not in constructors:
                message = (
                    f'Unknown metadata version `{core_metadata_version}` for field '
                    f'`tool.hatch.build.targets.{self.plugin_name}.core-metadata-version`. '
                    f'Available: {", ".join(sorted(constructors))}'
                )
                raise ValueError(message)

            self.__core_metadata_constructor = constructors[core_metadata_version]

        return self.__core_metadata_constructor

    def __validated_inclusion_table(self, option: str) -> dict[str, str]:
        # Shared validator for the `shared-data` / `extra-metadata` mappings;
        # the emitted error messages are identical for both fields.
        table = self.target_config.get(option, {})
        if not isinstance(table, dict):
            message = f'Field `tool.hatch.build.targets.{self.plugin_name}.{option}` must be a mapping'
            raise TypeError(message)

        for i, (source, relative_path) in enumerate(table.items(), 1):
            if not source:
                message = (
                    f'Source #{i} in field `tool.hatch.build.targets.{self.plugin_name}.{option}` '
                    f'cannot be an empty string'
                )
                raise ValueError(message)

            if not isinstance(relative_path, str):
                message = (
                    f'Path for source `{source}` in field '
                    f'`tool.hatch.build.targets.{self.plugin_name}.{option}` must be a string'
                )
                raise TypeError(message)

            if not relative_path:
                message = (
                    f'Path for source `{source}` in field '
                    f'`tool.hatch.build.targets.{self.plugin_name}.{option}` cannot be an empty string'
                )
                raise ValueError(message)

        return normalize_inclusion_map(table, self.root)

    @property
    def shared_data(self) -> dict[str, str]:
        """Validated, normalized `shared-data` source -> destination mapping."""
        if self.__shared_data is None:
            self.__shared_data = self.__validated_inclusion_table('shared-data')

        return self.__shared_data

    @property
    def extra_metadata(self) -> dict[str, str]:
        """Validated, normalized `extra-metadata` source -> destination mapping."""
        if self.__extra_metadata is None:
            self.__extra_metadata = self.__validated_inclusion_table('extra-metadata')

        return self.__extra_metadata

    @property
    def strict_naming(self) -> bool:
        """Whether artifact file names must use the normalized project name."""
        if self.__strict_naming is None:
            if 'strict-naming' in self.target_config:
                strict_naming = self.target_config['strict-naming']
                if not isinstance(strict_naming, bool):
                    message = f'Field `tool.hatch.build.targets.{self.plugin_name}.strict-naming` must be a boolean'
                    raise TypeError(message)
            else:
                strict_naming = self.build_config.get('strict-naming', True)
                if not isinstance(strict_naming, bool):
                    message = 'Field `tool.hatch.build.strict-naming` must be a boolean'
                    raise TypeError(message)

            self.__strict_naming = strict_naming

        return self.__strict_naming

    @property
    def macos_max_compat(self) -> bool:
        """Whether macOS 11+ platform tags are downgraded to 10_16 (default True)."""
        if self.__macos_max_compat is None:
            macos_max_compat = self.target_config.get('macos-max-compat', True)
            if not isinstance(macos_max_compat, bool):
                message = f'Field `tool.hatch.build.targets.{self.plugin_name}.macos-max-compat` must be a boolean'
                raise TypeError(message)

            self.__macos_max_compat = macos_max_compat

        return self.__macos_max_compat

    @cached_property
    def bypass_selection(self) -> bool:
        """Whether the default-selection heuristics may be skipped entirely."""
        bypass_selection = self.target_config.get('bypass-selection', False)
        if not isinstance(bypass_selection, bool):
            message = f'Field `tool.hatch.build.targets.{self.plugin_name}.bypass-selection` must be a boolean'
            raise TypeError(message)

        return bypass_selection

    # On case-insensitive filesystems (macOS, Windows) the on-disk name may
    # differ in case from the configured one; look it up via the directory
    # listing. Elsewhere the name is returned unchanged.
    if sys.platform in {'darwin', 'win32'}:

        @staticmethod
        def get_raw_fs_path_name(directory: str, name: str) -> str:
            wanted = name.casefold()
            for entry in os.listdir(directory):
                if entry.casefold() == wanted:
                    return entry

            return name  # no cov

    else:

        @staticmethod
        def get_raw_fs_path_name(directory: str, name: str) -> str:  # noqa: ARG004
            return name


class WheelBuilder(BuilderInterface):
    """
    Build a binary distribution (.whl file)
    """

    PLUGIN_NAME = 'wheel'

    def get_version_api(self) -> dict[str, Callable]:
        """Map version identifiers to their build methods."""
        return {'standard': self.build_standard, 'editable': self.build_editable}

    def get_default_versions(self) -> list[str]:  # noqa: PLR6301
        return ['standard']

    def clean(  # noqa: PLR6301
        self,
        directory: str,
        versions: list[str],  # noqa: ARG002
    ) -> None:
        """Remove previously built wheels from `directory`."""
        for filename in os.listdir(directory):
            if filename.endswith('.whl'):
                os.remove(os.path.join(directory, filename))

    def build_standard(self, directory: str, **build_data: Any) -> str:
        """Build a standard wheel in `directory` and return its path."""
        if 'tag' not in build_data:
            # Infer a platform-specific tag only when the hook asked for it.
            build_data['tag'] = self.get_best_matching_tag() if build_data['infer_tag'] else self.get_default_tag()

        with WheelArchive(
            self.artifact_project_id, reproducible=self.config.reproducible
        ) as archive, RecordFile() as records:
            for included_file in self.recurse_included_files():
                records.write(archive.add_file(included_file))

            self.write_data(archive, records, build_data, build_data['dependencies'])

            # RECORD lists itself last, with empty hash/size per the spec.
            records.write((f'{archive.metadata_directory}/RECORD', '', ''))
            archive.write_metadata('RECORD', records.construct())

        target = os.path.join(directory, f"{self.artifact_project_id}-{build_data['tag']}.whl")

        replace_file(archive.path, target)
        return target
# --- src/hatchling/builders/wheel.py (continued) ---
# WheelBuilder (continued). The class statement is repeated only so this
# flattened patch section stays parseable.
class WheelBuilder(BuilderInterface):
    def build_editable(self, directory: str, **build_data: Any) -> str:
        """Build an editable wheel, explicit when `dev-mode-dirs` is configured."""
        if self.config.dev_mode_dirs:
            return self.build_editable_explicit(directory, **build_data)

        return self.build_editable_detection(directory, **build_data)

    def build_editable_detection(self, directory: str, **build_data: Any) -> str:
        """Build an editable wheel by detecting the project's top-level packages.

        Uses the `editables` project to generate redirection modules / `.pth`
        files pointing back into the source tree.
        """
        from editables import EditableProject

        build_data['tag'] = self.get_default_tag()

        with WheelArchive(
            self.artifact_project_id, reproducible=self.config.reproducible
        ) as archive, RecordFile() as records:
            exposed_packages = {}
            for included_file in self.recurse_selected_project_files():
                if not included_file.path.endswith('.py'):
                    continue

                relative_path = included_file.relative_path
                distribution_path = included_file.distribution_path
                path_parts = relative_path.split(os.sep)

                # Root file
                if len(path_parts) == 1:  # no cov
                    exposed_packages[os.path.splitext(relative_path)[0]] = os.path.join(self.root, relative_path)
                    continue

                # Root package
                root_module = path_parts[0]
                if distribution_path == relative_path:
                    exposed_packages[root_module] = os.path.join(self.root, root_module)
                else:
                    distribution_module = distribution_path.split(os.sep)[0]
                    try:
                        exposed_packages[distribution_module] = os.path.join(
                            self.root,
                            f'{relative_path[:relative_path.index(distribution_path)]}{distribution_module}',
                        )
                    except ValueError:
                        message = (
                            'Dev mode installations are unsupported when any path rewrite in the `sources` option '
                            'changes a prefix rather than removes it, see: '
                            'https://github.com/pfmoore/editables/issues/20'
                        )
                        raise ValueError(message) from None

            editable_project = EditableProject(self.metadata.core.name, self.root)

            if self.config.dev_mode_exact:
                for module, relative_path in exposed_packages.items():
                    editable_project.map(module, relative_path)
            else:
                for relative_path in exposed_packages.values():
                    editable_project.add_to_path(os.path.dirname(relative_path))

            for raw_filename, content in sorted(editable_project.files()):
                filename = raw_filename
                if filename.endswith('.pth') and not filename.startswith('_'):
                    # FIX: was `f'_(unknown)'` — a constant f-string with no
                    # placeholder that renamed every generated .pth file to the
                    # same literal name. Prefix the actual file name with an
                    # underscore instead (matches `build_editable_explicit`).
                    filename = f'_{filename}'

                record = archive.write_file(filename, content)
                records.write(record)

            for included_file in self.recurse_forced_files(self.get_forced_inclusion_map(build_data)):
                record = archive.add_file(included_file)
                records.write(record)

            extra_dependencies = list(build_data['dependencies'])
            for raw_dependency in editable_project.dependencies():
                dependency = raw_dependency
                if dependency == 'editables':
                    # Pin the minimum version hatchling is known to work with.
                    dependency += f'~={EDITABLES_MINIMUM_VERSION}'
                else:  # no cov
                    pass

                extra_dependencies.append(dependency)

            self.write_data(archive, records, build_data, extra_dependencies)

            records.write((f'{archive.metadata_directory}/RECORD', '', ''))
            archive.write_metadata('RECORD', records.construct())

        target = os.path.join(directory, f"{self.artifact_project_id}-{build_data['tag']}.whl")

        replace_file(archive.path, target)
        return target

    def build_editable_explicit(self, directory: str, **build_data: Any) -> str:
        """Build an editable wheel from explicitly configured `dev-mode-dirs`."""
        build_data['tag'] = self.get_default_tag()

        with WheelArchive(
            self.artifact_project_id, reproducible=self.config.reproducible
        ) as archive, RecordFile() as records:
            directories = sorted(
                os.path.normpath(os.path.join(self.root, relative_directory))
                for relative_directory in self.config.dev_mode_dirs
            )

            record = archive.write_file(f"_{self.metadata.core.name.replace('-', '_')}.pth", '\n'.join(directories))
            records.write(record)

            for included_file in self.recurse_forced_files(self.get_forced_inclusion_map(build_data)):
                record = archive.add_file(included_file)
                records.write(record)

            self.write_data(archive, records, build_data, build_data['dependencies'])

            records.write((f'{archive.metadata_directory}/RECORD', '', ''))
            archive.write_metadata('RECORD', records.construct())

        target = os.path.join(directory, f"{self.artifact_project_id}-{build_data['tag']}.whl")

        replace_file(archive.path, target)
        return target

    def write_data(
        self, archive: WheelArchive, records: RecordFile, build_data: dict[str, Any], extra_dependencies: Sequence[str]
    ) -> None:
        """Write shared data and then all metadata into the archive."""
        self.add_shared_data(archive, records)

        # Ensure metadata is written last, see https://peps.python.org/pep-0427/#recommended-archiver-features
        self.write_metadata(archive, records, build_data, extra_dependencies=extra_dependencies)

    def add_shared_data(self, archive: WheelArchive, records: RecordFile) -> None:
        for shared_file in self.recurse_explicit_files(self.config.shared_data):
            record = archive.add_shared_file(shared_file)
            records.write(record)

    def write_metadata(
        self,
        archive: WheelArchive,
        records: RecordFile,
        build_data: dict[str, Any],
        extra_dependencies: Sequence[str] = (),
    ) -> None:
        """Write all `.dist-info` contents in the spec-recommended order."""
        # <<< IMPORTANT >>>
        # Ensure calls are ordered by the number of path components followed by the name of the components

        # METADATA
        self.write_project_metadata(archive, records, extra_dependencies=extra_dependencies)

        # WHEEL
        self.write_archive_metadata(archive, records, build_data)

        # entry_points.txt
        self.write_entry_points_file(archive, records)

        # licenses/
        self.add_licenses(archive, records)

        # extra_metadata/ - write last
        self.add_extra_metadata(archive, records, build_data)

    @staticmethod
    def write_archive_metadata(archive: WheelArchive, records: RecordFile, build_data: dict[str, Any]) -> None:
        """Write the WHEEL file (generator, purelib flag, expanded tags)."""
        from packaging.tags import parse_tag

        metadata = f"""\
Wheel-Version: 1.0
Generator: hatchling {__version__}
Root-Is-Purelib: {'true' if build_data['pure_python'] else 'false'}
"""

        for tag in sorted(map(str, parse_tag(build_data['tag']))):
            metadata += f'Tag: {tag}\n'

        record = archive.write_metadata('WHEEL', metadata)
        records.write(record)

    def write_entry_points_file(self, archive: WheelArchive, records: RecordFile) -> None:
        entry_points_file = self.construct_entry_points_file()
        if entry_points_file:
            record = archive.write_metadata('entry_points.txt', entry_points_file)
            records.write(record)

    def write_project_metadata(
        self, archive: WheelArchive, records: RecordFile, extra_dependencies: Sequence[str] = ()
    ) -> None:
        record = archive.write_metadata(
            'METADATA', self.config.core_metadata_constructor(self.metadata, extra_dependencies=extra_dependencies)
        )
        records.write(record)

    def add_licenses(self, archive: WheelArchive, records: RecordFile) -> None:
        for relative_path in self.metadata.core.license_files:
            license_file = os.path.normpath(os.path.join(self.root, relative_path))
            with open(license_file, 'rb') as f:
                record = archive.write_metadata(f'licenses/{relative_path}', f.read())
                records.write(record)

    def add_extra_metadata(self, archive: WheelArchive, records: RecordFile, build_data: dict[str, Any]) -> None:
        """Merge configured and hook-supplied extra metadata; hook entries win."""
        extra_metadata = dict(self.config.extra_metadata)
        extra_metadata.update(normalize_inclusion_map(build_data['extra_metadata'], self.root))

        for extra_metadata_file in self.recurse_explicit_files(extra_metadata):
            record = archive.add_extra_metadata_file(extra_metadata_file)
            records.write(record)

    def construct_entry_points_file(self) -> str:
        """Render `entry_points.txt` from scripts, GUI scripts and entry points."""
        core_metadata = self.metadata.core
        metadata_file = ''

        if core_metadata.scripts:
            metadata_file += '\n[console_scripts]\n'
            for name, object_ref in core_metadata.scripts.items():
                metadata_file += f'{name} = {object_ref}\n'

        if core_metadata.gui_scripts:
            metadata_file += '\n[gui_scripts]\n'
            for name, object_ref in core_metadata.gui_scripts.items():
                metadata_file += f'{name} = {object_ref}\n'

        if core_metadata.entry_points:
            for group, entry_points in core_metadata.entry_points.items():
                metadata_file += f'\n[{group}]\n'
                for name, object_ref in entry_points.items():
                    metadata_file += f'{name} = {object_ref}\n'

        return metadata_file.lstrip()

    def get_default_tag(self) -> str:
        """Build a pure-Python tag like `py3-none-any` from the Python constraint."""
        supported_python_versions = []
        for major_version in get_known_python_major_versions():
            for minor_version in range(100):
                if self.metadata.core.python_constraint.contains(f'{major_version}.{minor_version}'):
                    supported_python_versions.append(f'py{major_version}')
                    break

        return f'{".".join(supported_python_versions)}-none-any'

    def get_best_matching_tag(self) -> str:
        """Return the most specific tag for the running interpreter/platform.

        On macOS, honors ARCHFLAGS cross-compilation (including universal2)
        and optionally downgrades 11+ SDK tags to 10_16 for compatibility.
        """
        # NOTE: redundant local `import sys` removed — `sys` is imported at
        # the top of this module.
        from packaging.tags import sys_tags

        tag = next(sys_tags())
        tag_parts = [tag.interpreter, tag.abi, tag.platform]

        archflags = os.environ.get('ARCHFLAGS', '')
        if sys.platform == 'darwin':
            if archflags and sys.version_info[:2] >= (3, 8):
                import platform
                import re

                archs = re.findall(r'-arch (\S+)', archflags)
                if archs:
                    plat = tag_parts[2]
                    current_arch = platform.mac_ver()[2]
                    new_arch = 'universal2' if set(archs) == {'x86_64', 'arm64'} else archs[0]
                    tag_parts[2] = f'{plat[:plat.rfind(current_arch)]}{new_arch}'

            if self.config.macos_max_compat:
                import re

                plat = tag_parts[2]
                sdk_match = re.search(r'macosx_(\d+_\d+)', plat)
                if sdk_match:
                    sdk_version_part = sdk_match.group(1)
                    if tuple(map(int, sdk_version_part.split('_'))) >= (11, 0):
                        tag_parts[2] = plat.replace(sdk_version_part, '10_16', 1)

        return '-'.join(tag_parts)

    def get_default_build_data(self) -> dict[str, Any]:  # noqa: PLR6301
        return {
            'infer_tag': False,
            'pure_python': True,
            'dependencies': [],
            'force_include_editable': {},
            'extra_metadata': {},
        }

    def get_forced_inclusion_map(self, build_data: dict[str, Any]) -> dict[str, str]:
        if not build_data['force_include_editable']:
            return self.config.get_force_include()

        return normalize_inclusion_map(build_data['force_include_editable'], self.root)

    @property
    def artifact_project_id(self) -> str:
        """Project ID used in the artifact file name; raw name when strict naming is off."""
        return (
            self.project_id
            if self.config.strict_naming
            else f'{self.normalize_file_name_component(self.metadata.core.raw_name)}-{self.metadata.version}'
        )

    @classmethod
    def get_config_class(cls) -> type[WheelBuilderConfig]:
        return WheelBuilderConfig
# --- new file: src/hatchling/cli/__init__.py ---
import argparse

from hatchling.cli.build import build_command
from hatchling.cli.dep import dep_command
from hatchling.cli.metadata import metadata_command
from hatchling.cli.version import version_command


def hatchling() -> int:
    """Entry point for the `hatchling` CLI; returns the process exit code."""
    parser = argparse.ArgumentParser(prog='hatchling', allow_abbrev=False)
    subparsers = parser.add_subparsers()

    defaults = {'metavar': ''}

    build_command(subparsers, defaults)
    dep_command(subparsers, defaults)
    metadata_command(subparsers, defaults)
    version_command(subparsers, defaults)

    kwargs = vars(parser.parse_args())
    try:
        # Each subcommand registers its implementation via `set_defaults(func=...)`;
        # no subcommand given means no `func` key.
        command = kwargs.pop('func')
    except KeyError:
        parser.print_help()
    else:
        command(**kwargs)

    return 0


# --- new file: src/hatchling/cli/build/__init__.py ---
# Original file header: `from __future__ import annotations` (rendered as a
# comment because this flattened patch section is not a real module top).
from typing import Any


def build_impl(
    *,
    called_by_app: bool,  # noqa: ARG001
    directory: str,
    targets: list[str],
    hooks_only: bool,
    no_hooks: bool,
    clean: bool,
    clean_hooks_after: bool,
    clean_only: bool,
) -> None:
    """Implementation of `hatchling build`: resolve targets and run builders."""
    import os

    from hatchling.bridge.app import Application
    from hatchling.builders.constants import BuildEnvVars
    from hatchling.metadata.core import ProjectMetadata
    from hatchling.plugin.manager import PluginManager

    app = Application()

    if hooks_only and no_hooks:
        app.abort('Cannot use both --hooks-only and --no-hooks together')

    # We guarantee that builds occur within the project directory.
    # FIX: `root` was assigned `os.getcwd()` a second time further down with
    # nothing in between that could change the working directory; the
    # duplicate assignment has been removed.
    root = os.getcwd()
    plugin_manager = PluginManager()
    metadata = ProjectMetadata(root, plugin_manager)

    # Parse `target[:version1,version2]` specifications.
    target_data: dict[str, Any] = {}
    if targets:
        for data in targets:
            target_name, _, version_data = data.partition(':')
            versions = version_data.split(',') if version_data else []
            target_data.setdefault(target_name, []).extend(versions)
    else:  # no cov
        target_data['sdist'] = []
        target_data['wheel'] = []

    builders = {}
    unknown_targets = []
    for target_name in target_data:
        builder_class = plugin_manager.builder.get(target_name)
        if builder_class is None:
            unknown_targets.append(target_name)
        else:
            builders[target_name] = builder_class

    if unknown_targets:
        app.abort(f"Unknown build targets: {', '.join(sorted(unknown_targets))}")

    if no_hooks:
        os.environ[BuildEnvVars.NO_HOOKS] = 'true'

    for i, (target_name, versions) in enumerate(target_data.items()):
        # Separate targets with a blank line
        if not clean_only and i != 0:  # no cov
            app.display_info()

        builder_class = builders[target_name]

        # Display name before instantiation in case of errors
        if not clean_only and len(target_data) > 1:
            app.display_mini_header(target_name)

        builder = builder_class(root, plugin_manager=plugin_manager, metadata=metadata, app=app.get_safe_application())

        for artifact in builder.build(
            directory=directory,
            versions=versions,
            hooks_only=hooks_only,
            clean=clean,
            clean_hooks_after=clean_hooks_after,
            clean_only=clean_only,
        ):
            if os.path.isfile(artifact) and artifact.startswith(root):
                app.display_info(os.path.relpath(artifact, root))
            else:  # no cov
                app.display_info(artifact)


def build_command(subparsers: argparse._SubParsersAction, defaults: Any) -> None:
    """Register the `build` subcommand and its options."""
    parser = subparsers.add_parser('build')
    parser.add_argument(
        '-d', '--directory', dest='directory', help='The directory in which to build artifacts', **defaults
    )
    parser.add_argument(
        '-t',
        '--target',
        dest='targets',
        action='append',
        help='Comma-separated list of targets to build, overriding project defaults',
        **defaults,
    )
    parser.add_argument('--hooks-only', dest='hooks_only', action='store_true', default=None)
    parser.add_argument('--no-hooks', dest='no_hooks', action='store_true', default=None)
    parser.add_argument('-c', '--clean', dest='clean', action='store_true', default=None)
    parser.add_argument('--clean-hooks-after', dest='clean_hooks_after', action='store_true', default=None)
    parser.add_argument('--clean-only', dest='clean_only', action='store_true')
    parser.add_argument('--app', dest='called_by_app', action='store_true', help=argparse.SUPPRESS)
    parser.set_defaults(func=build_impl)


# --- new file: src/hatchling/cli/dep/__init__.py ---
# Original file header: `from __future__ import annotations` (rendered as a
# comment because this flattened patch section is not a real module top).
import sys
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    import argparse


def synced_impl(*, dependencies: list[str], python: str) -> None:
    """Exit 0 when `dependencies` are satisfied in the target environment, else 1.

    When `python` is given, the target interpreter's `sys.path` is queried
    via a subprocess so another environment can be inspected.
    """
    import subprocess
    from ast import literal_eval

    from packaging.requirements import Requirement

    from hatchling.dep.core import dependencies_in_sync

    sys_path = None
    if python:
        output = subprocess.check_output([python, '-c', 'import sys;print([path for path in sys.path if path])'])
        sys_path = literal_eval(output.strip().decode('utf-8'))

    sys.exit(0 if dependencies_in_sync(list(map(Requirement, dependencies)), sys_path) else 1)


def synced_command(subparsers: argparse._SubParsersAction, defaults: Any) -> None:
    """Register the `dep synced` subcommand."""
    parser = subparsers.add_parser('synced')
    parser.add_argument('dependencies', nargs='+')
    parser.add_argument('-p', '--python', dest='python', **defaults)
    parser.set_defaults(func=synced_impl)

# NOTE(review): `dep_command` begins here in the original but is cut off at
# the edge of this chunk; it is not reproduced to avoid guessing its body.
subparsers.add_parser('dep') + subparsers = parser.add_subparsers() + + synced_command(subparsers, defaults) diff --git a/src/hatchling/cli/metadata/__init__.py b/src/hatchling/cli/metadata/__init__.py new file mode 100644 index 0000000..3469339 --- /dev/null +++ b/src/hatchling/cli/metadata/__init__.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +import argparse +from typing import Any + + +def metadata_impl( + *, + called_by_app: bool, # noqa: ARG001 + field: str, + compact: bool, +) -> None: + import json + import os + + from hatchling.bridge.app import Application + from hatchling.metadata.core import ProjectMetadata + from hatchling.metadata.utils import resolve_metadata_fields + from hatchling.plugin.manager import PluginManager + + app = Application() + + root = os.getcwd() + plugin_manager = PluginManager() + project_metadata = ProjectMetadata(root, plugin_manager) + + metadata = resolve_metadata_fields(project_metadata) + if field: # no cov + if field not in metadata: + app.abort(f'Unknown metadata field: {field}') + elif field == 'readme': + app.display(metadata[field]['text']) + elif isinstance(metadata[field], str): + app.display(metadata[field]) + else: + app.display(json.dumps(metadata[field], indent=4)) + + return + + for key, value in list(metadata.items()): + if not value: + metadata.pop(key) + + if compact: + app.display(json.dumps(metadata, separators=(',', ':'))) + else: # no cov + app.display(json.dumps(metadata, indent=4)) + + +def metadata_command( + subparsers: argparse._SubParsersAction, + defaults: Any, # noqa: ARG001 +) -> None: + parser = subparsers.add_parser('metadata') + parser.add_argument('field', nargs='?') + parser.add_argument('-c', '--compact', action='store_true') + parser.add_argument('--app', dest='called_by_app', action='store_true', help=argparse.SUPPRESS) + parser.set_defaults(func=metadata_impl) diff --git a/src/hatchling/cli/version/__init__.py b/src/hatchling/cli/version/__init__.py new file mode 100644 index 
0000000..20cae30 --- /dev/null +++ b/src/hatchling/cli/version/__init__.py @@ -0,0 +1,51 @@ +from __future__ import annotations + +import argparse +from typing import Any + + +def version_impl( + *, + called_by_app: bool, # noqa: ARG001 + desired_version: str, +) -> None: + import os + + from hatchling.bridge.app import Application + from hatchling.metadata.core import ProjectMetadata + from hatchling.plugin.manager import PluginManager + + app = Application() + + root = os.getcwd() + plugin_manager = PluginManager() + metadata = ProjectMetadata(root, plugin_manager) + + if 'version' in metadata.config.get('project', {}): + if desired_version: + app.abort('Cannot set version when it is statically defined by the `project.version` field') + else: + app.display(metadata.core.version) + return + + source = metadata.hatch.version.source + + version_data = source.get_version_data() + original_version = version_data['version'] + + if not desired_version: + app.display(original_version) + return + + updated_version = metadata.hatch.version.scheme.update(desired_version, original_version, version_data) + source.set_version(updated_version, version_data) + + app.display_info(f'Old: {original_version}') + app.display_info(f'New: {updated_version}') + + +def version_command(subparsers: argparse._SubParsersAction, defaults: Any) -> None: + parser = subparsers.add_parser('version') + parser.add_argument('desired_version', default='', nargs='?', **defaults) + parser.add_argument('--app', dest='called_by_app', action='store_true', help=argparse.SUPPRESS) + parser.set_defaults(func=version_impl) diff --git a/src/hatchling/dep/__init__.py b/src/hatchling/dep/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/hatchling/dep/core.py b/src/hatchling/dep/core.py new file mode 100644 index 0000000..b54f41e --- /dev/null +++ b/src/hatchling/dep/core.py @@ -0,0 +1,132 @@ +from __future__ import annotations + +import re +import sys +from importlib.metadata import 
# Reconstructed from patch hunks for src/hatchling/dep/core.py and
# src/hatchling/licenses/parse.py. Code tokens are preserved as embedded
# in the patch; only documentation is added.
from __future__ import annotations

import re
import sys
from importlib.metadata import Distribution, DistributionFinder

from packaging.markers import default_environment
from packaging.requirements import Requirement


class DistributionCache:
    """Lazy cache of installed distributions, keyed by normalized name.

    Discovery walks the given ``sys_path`` entries and is consumed
    incrementally: the resolver iterator is only advanced until the
    requested name is found, and everything seen on the way is cached.
    """

    def __init__(self, sys_path: list[str]) -> None:
        self._resolver = Distribution.discover(context=DistributionFinder.Context(path=sys_path))
        self._distributions: dict[str, Distribution] = {}
        self._search_exhausted = False
        # Collapses runs of `-`, `_` and `.` for name normalization
        # (PEP 503-style, applied to both lookups and discovered names).
        self._canonical_regex = re.compile(r'[-_.]+')

    def __getitem__(self, item: str) -> Distribution | None:
        """Return the distribution named *item* (any normalization), or ``None``."""
        item = self._canonical_regex.sub('-', item).lower()
        possible_distribution = self._distributions.get(item)
        if possible_distribution is not None:
            return possible_distribution

        # Be safe even though the code as-is will never reach this since
        # the first unknown distribution will fail fast
        if self._search_exhausted:  # no cov
            return None

        # Advance the shared resolver, caching every distribution seen
        # until the requested one turns up.
        for distribution in self._resolver:
            name = distribution.metadata['Name']
            if name is None:
                continue

            name = self._canonical_regex.sub('-', name).lower()
            self._distributions[name] = distribution
            if name == item:
                return distribution

        self._search_exhausted = True

        return None


def dependency_in_sync(
    requirement: Requirement, environment: dict[str, str], installed_distributions: DistributionCache
) -> bool:
    """Return whether *requirement* is satisfied by the installed distributions.

    A requirement whose marker does not apply to *environment* counts as
    satisfied. Extras are resolved recursively through the distribution's
    ``Requires-Dist`` metadata, and direct-URL (VCS) requirements are
    matched against the installed ``direct_url.json``.
    """
    if requirement.marker and not requirement.marker.evaluate(environment):
        return True

    distribution = installed_distributions[requirement.name]
    if distribution is None:
        return False

    extras = requirement.extras
    if extras:
        transitive_requirements: list[str] = distribution.metadata.get_all('Requires-Dist', [])
        if not transitive_requirements:
            return False

        available_extras: list[str] = distribution.metadata.get_all('Provides-Extra', [])

        for requirement_string in transitive_requirements:
            transitive_requirement = Requirement(requirement_string)
            # Only marker-gated requirements can belong to an extra.
            if not transitive_requirement.marker:
                continue

            for extra in extras:
                # FIXME: This may cause a build to never be ready if newer versions do not provide the desired
                # extra and it's just a user error/typo. See: https://github.com/pypa/pip/issues/7122
                if extra not in available_extras:
                    return False

                # Re-evaluate the transitive requirement with `extra` set.
                extra_environment = dict(environment)
                extra_environment['extra'] = extra
                if not dependency_in_sync(transitive_requirement, extra_environment, installed_distributions):
                    return False

    if requirement.specifier and not requirement.specifier.contains(distribution.version):
        return False

    # TODO: handle https://discuss.python.org/t/11938
    if requirement.url:
        direct_url_file = distribution.read_text('direct_url.json')
        if direct_url_file is not None:
            import json

            # https://packaging.python.org/specifications/direct-url/
            direct_url_data = json.loads(direct_url_file)
            if 'vcs_info' in direct_url_data:
                url = direct_url_data['url']
                vcs_info = direct_url_data['vcs_info']
                vcs = vcs_info['vcs']
                commit_id = vcs_info['commit_id']
                requested_revision = vcs_info.get('requested_revision')

                # Try a few variations, see https://peps.python.org/pep-0440/#direct-references
                if (
                    requested_revision and requirement.url == f'{vcs}+{url}@{requested_revision}#{commit_id}'
                ) or requirement.url == f'{vcs}+{url}@{commit_id}':
                    return True

                if requirement.url in {f'{vcs}+{url}', f'{vcs}+{url}@{requested_revision}'}:
                    import subprocess

                    if vcs == 'git':
                        # Query the remote to see whether the pinned commit
                        # is still the tip of the requested revision.
                        vcs_cmd = [vcs, 'ls-remote', url]
                        if requested_revision:
                            vcs_cmd.append(requested_revision)
                    # TODO: add elifs for hg, svn, and bzr https://github.com/pypa/hatch/issues/760
                    else:
                        return False
                    result = subprocess.run(vcs_cmd, capture_output=True, text=True)  # noqa: PLW1510
                    if result.returncode:
                        return False
                    latest_commit_id, *_ = result.stdout.split()
                    return commit_id == latest_commit_id

        # A URL requirement that could not be matched is out of sync.
        return False

    return True


def dependencies_in_sync(
    requirements: list[Requirement], sys_path: list[str] | None = None, environment: dict[str, str] | None = None
) -> bool:
    """Return whether every requirement is satisfied.

    Args:
        requirements: Parsed PEP 508 requirements to check.
        sys_path: Import paths to search; defaults to the current ``sys.path``.
        environment: Marker environment; defaults to the current interpreter's.
    """
    if sys_path is None:
        sys_path = sys.path
    if environment is None:
        environment = default_environment()

    installed_distributions = DistributionCache(sys_path)
    return all(dependency_in_sync(requirement, environment, installed_distributions) for requirement in requirements)


# --- src/hatchling/licenses/parse.py ---
from typing import cast

from hatchling.licenses.supported import EXCEPTIONS, LICENSES


def get_valid_licenses() -> dict[str, dict[str, str | bool]]:
    """Return the SPDX license table extended with the custom LicenseRef ids."""
    valid_licenses = LICENSES.copy()

    # https://peps.python.org/pep-0639/#should-custom-license-identifiers-be-allowed
    public_license = 'LicenseRef-Public-Domain'
    valid_licenses[public_license.lower()] = {'id': public_license, 'deprecated': False}

    proprietary_license = 'LicenseRef-Proprietary'
    valid_licenses[proprietary_license.lower()] = {'id': proprietary_license, 'deprecated': False}

    return valid_licenses


def normalize_license_expression(raw_license_expression: str) -> str:
    """Validate an SPDX license expression and return its canonical form.

    Raises:
        ValueError: If the expression's grammar is invalid, or it names an
            unknown license or license exception.
    """
    if not raw_license_expression:
        return raw_license_expression

    valid_licenses = get_valid_licenses()

    # First normalize to lower case so we can look up licenses/exceptions
    # and so boolean operators are Python-compatible
    license_expression = raw_license_expression.lower()

    # Then pad parentheses so tokenization can be achieved by merely splitting on white space
    license_expression = license_expression.replace('(', ' ( ').replace(')', ' ) ')

    # Now we begin parsing
    tokens = license_expression.split()

    # Rather than implementing boolean logic we create an expression that Python can parse.
    # Everything that is not involved with the grammar itself is treated as `False` and the
    # expression should evaluate as such.
    python_tokens = []
    for token in tokens:
        if token not in {'or', 'and', 'with', '(', ')'}:
            python_tokens.append('False')
        elif token == 'with':  # noqa: S105
            # `with` has no Python analogue; `or` keeps the expression parseable.
            python_tokens.append('or')
        elif token == '(' and python_tokens and python_tokens[-1] not in {'or', 'and'}:  # noqa: S105
            message = f'invalid license expression: {raw_license_expression}'
            raise ValueError(message)
        else:
            python_tokens.append(token)

    python_expression = ' '.join(python_tokens)
    try:
        # Grammar check only: operands are all `False`, so a well-formed
        # expression must evaluate to `False`.
        result = eval(python_expression)  # noqa: PGH001, S307
    except Exception:  # noqa: BLE001
        result = True

    if result is not False:
        message = f'invalid license expression: {raw_license_expression}'
        raise ValueError(message) from None

    # Take a final pass to check for unknown licenses/exceptions
    normalized_tokens = []
    for token in tokens:
        if token in {'or', 'and', 'with', '(', ')'}:
            normalized_tokens.append(token.upper())
            continue

        if normalized_tokens and normalized_tokens[-1] == 'WITH':
            if token not in EXCEPTIONS:
                message = f'unknown license exception: {token}'
                raise ValueError(message)

            normalized_tokens.append(cast(str, EXCEPTIONS[token]['id']))
        else:
            # A trailing `+` means "or any later version" and is kept as a suffix.
            if token.endswith('+'):
                final_token = token[:-1]
                suffix = '+'
            else:
                final_token = token
                suffix = ''

            if final_token not in valid_licenses:
                message = f'unknown license: {final_token}'
                raise ValueError(message)

            normalized_tokens.append(cast(str, valid_licenses[final_token]['id']) + suffix)

    # Construct the normalized expression
    normalized_expression = ' '.join(normalized_tokens)

    # Fix internal padding for parentheses
    return normalized_expression.replace('( ', '(').replace(' )', ')')
b/src/hatchling/licenses/supported.py @@ -0,0 +1,554 @@ +from __future__ import annotations + +VERSION = '3.19' + +LICENSES: dict[str, dict[str, str | bool]] = { + '0bsd': {'id': '0BSD', 'deprecated': False}, + 'aal': {'id': 'AAL', 'deprecated': False}, + 'abstyles': {'id': 'Abstyles', 'deprecated': False}, + 'adobe-2006': {'id': 'Adobe-2006', 'deprecated': False}, + 'adobe-glyph': {'id': 'Adobe-Glyph', 'deprecated': False}, + 'adsl': {'id': 'ADSL', 'deprecated': False}, + 'afl-1.1': {'id': 'AFL-1.1', 'deprecated': False}, + 'afl-1.2': {'id': 'AFL-1.2', 'deprecated': False}, + 'afl-2.0': {'id': 'AFL-2.0', 'deprecated': False}, + 'afl-2.1': {'id': 'AFL-2.1', 'deprecated': False}, + 'afl-3.0': {'id': 'AFL-3.0', 'deprecated': False}, + 'afmparse': {'id': 'Afmparse', 'deprecated': False}, + 'agpl-1.0': {'id': 'AGPL-1.0', 'deprecated': True}, + 'agpl-1.0-only': {'id': 'AGPL-1.0-only', 'deprecated': False}, + 'agpl-1.0-or-later': {'id': 'AGPL-1.0-or-later', 'deprecated': False}, + 'agpl-3.0': {'id': 'AGPL-3.0', 'deprecated': True}, + 'agpl-3.0-only': {'id': 'AGPL-3.0-only', 'deprecated': False}, + 'agpl-3.0-or-later': {'id': 'AGPL-3.0-or-later', 'deprecated': False}, + 'aladdin': {'id': 'Aladdin', 'deprecated': False}, + 'amdplpa': {'id': 'AMDPLPA', 'deprecated': False}, + 'aml': {'id': 'AML', 'deprecated': False}, + 'ampas': {'id': 'AMPAS', 'deprecated': False}, + 'antlr-pd': {'id': 'ANTLR-PD', 'deprecated': False}, + 'antlr-pd-fallback': {'id': 'ANTLR-PD-fallback', 'deprecated': False}, + 'apache-1.0': {'id': 'Apache-1.0', 'deprecated': False}, + 'apache-1.1': {'id': 'Apache-1.1', 'deprecated': False}, + 'apache-2.0': {'id': 'Apache-2.0', 'deprecated': False}, + 'apafml': {'id': 'APAFML', 'deprecated': False}, + 'apl-1.0': {'id': 'APL-1.0', 'deprecated': False}, + 'app-s2p': {'id': 'App-s2p', 'deprecated': False}, + 'apsl-1.0': {'id': 'APSL-1.0', 'deprecated': False}, + 'apsl-1.1': {'id': 'APSL-1.1', 'deprecated': False}, + 'apsl-1.2': {'id': 'APSL-1.2', 'deprecated': 
False}, + 'apsl-2.0': {'id': 'APSL-2.0', 'deprecated': False}, + 'arphic-1999': {'id': 'Arphic-1999', 'deprecated': False}, + 'artistic-1.0': {'id': 'Artistic-1.0', 'deprecated': False}, + 'artistic-1.0-cl8': {'id': 'Artistic-1.0-cl8', 'deprecated': False}, + 'artistic-1.0-perl': {'id': 'Artistic-1.0-Perl', 'deprecated': False}, + 'artistic-2.0': {'id': 'Artistic-2.0', 'deprecated': False}, + 'baekmuk': {'id': 'Baekmuk', 'deprecated': False}, + 'bahyph': {'id': 'Bahyph', 'deprecated': False}, + 'barr': {'id': 'Barr', 'deprecated': False}, + 'beerware': {'id': 'Beerware', 'deprecated': False}, + 'bitstream-vera': {'id': 'Bitstream-Vera', 'deprecated': False}, + 'bittorrent-1.0': {'id': 'BitTorrent-1.0', 'deprecated': False}, + 'bittorrent-1.1': {'id': 'BitTorrent-1.1', 'deprecated': False}, + 'blessing': {'id': 'blessing', 'deprecated': False}, + 'blueoak-1.0.0': {'id': 'BlueOak-1.0.0', 'deprecated': False}, + 'borceux': {'id': 'Borceux', 'deprecated': False}, + 'bsd-1-clause': {'id': 'BSD-1-Clause', 'deprecated': False}, + 'bsd-2-clause': {'id': 'BSD-2-Clause', 'deprecated': False}, + 'bsd-2-clause-freebsd': {'id': 'BSD-2-Clause-FreeBSD', 'deprecated': True}, + 'bsd-2-clause-netbsd': {'id': 'BSD-2-Clause-NetBSD', 'deprecated': True}, + 'bsd-2-clause-patent': {'id': 'BSD-2-Clause-Patent', 'deprecated': False}, + 'bsd-2-clause-views': {'id': 'BSD-2-Clause-Views', 'deprecated': False}, + 'bsd-3-clause': {'id': 'BSD-3-Clause', 'deprecated': False}, + 'bsd-3-clause-attribution': {'id': 'BSD-3-Clause-Attribution', 'deprecated': False}, + 'bsd-3-clause-clear': {'id': 'BSD-3-Clause-Clear', 'deprecated': False}, + 'bsd-3-clause-lbnl': {'id': 'BSD-3-Clause-LBNL', 'deprecated': False}, + 'bsd-3-clause-modification': {'id': 'BSD-3-Clause-Modification', 'deprecated': False}, + 'bsd-3-clause-no-military-license': {'id': 'BSD-3-Clause-No-Military-License', 'deprecated': False}, + 'bsd-3-clause-no-nuclear-license': {'id': 'BSD-3-Clause-No-Nuclear-License', 'deprecated': False}, + 
'bsd-3-clause-no-nuclear-license-2014': {'id': 'BSD-3-Clause-No-Nuclear-License-2014', 'deprecated': False}, + 'bsd-3-clause-no-nuclear-warranty': {'id': 'BSD-3-Clause-No-Nuclear-Warranty', 'deprecated': False}, + 'bsd-3-clause-open-mpi': {'id': 'BSD-3-Clause-Open-MPI', 'deprecated': False}, + 'bsd-4-clause': {'id': 'BSD-4-Clause', 'deprecated': False}, + 'bsd-4-clause-shortened': {'id': 'BSD-4-Clause-Shortened', 'deprecated': False}, + 'bsd-4-clause-uc': {'id': 'BSD-4-Clause-UC', 'deprecated': False}, + 'bsd-protection': {'id': 'BSD-Protection', 'deprecated': False}, + 'bsd-source-code': {'id': 'BSD-Source-Code', 'deprecated': False}, + 'bsl-1.0': {'id': 'BSL-1.0', 'deprecated': False}, + 'busl-1.1': {'id': 'BUSL-1.1', 'deprecated': False}, + 'bzip2-1.0.5': {'id': 'bzip2-1.0.5', 'deprecated': True}, + 'bzip2-1.0.6': {'id': 'bzip2-1.0.6', 'deprecated': False}, + 'c-uda-1.0': {'id': 'C-UDA-1.0', 'deprecated': False}, + 'cal-1.0': {'id': 'CAL-1.0', 'deprecated': False}, + 'cal-1.0-combined-work-exception': {'id': 'CAL-1.0-Combined-Work-Exception', 'deprecated': False}, + 'caldera': {'id': 'Caldera', 'deprecated': False}, + 'catosl-1.1': {'id': 'CATOSL-1.1', 'deprecated': False}, + 'cc-by-1.0': {'id': 'CC-BY-1.0', 'deprecated': False}, + 'cc-by-2.0': {'id': 'CC-BY-2.0', 'deprecated': False}, + 'cc-by-2.5': {'id': 'CC-BY-2.5', 'deprecated': False}, + 'cc-by-2.5-au': {'id': 'CC-BY-2.5-AU', 'deprecated': False}, + 'cc-by-3.0': {'id': 'CC-BY-3.0', 'deprecated': False}, + 'cc-by-3.0-at': {'id': 'CC-BY-3.0-AT', 'deprecated': False}, + 'cc-by-3.0-de': {'id': 'CC-BY-3.0-DE', 'deprecated': False}, + 'cc-by-3.0-igo': {'id': 'CC-BY-3.0-IGO', 'deprecated': False}, + 'cc-by-3.0-nl': {'id': 'CC-BY-3.0-NL', 'deprecated': False}, + 'cc-by-3.0-us': {'id': 'CC-BY-3.0-US', 'deprecated': False}, + 'cc-by-4.0': {'id': 'CC-BY-4.0', 'deprecated': False}, + 'cc-by-nc-1.0': {'id': 'CC-BY-NC-1.0', 'deprecated': False}, + 'cc-by-nc-2.0': {'id': 'CC-BY-NC-2.0', 'deprecated': False}, + 
'cc-by-nc-2.5': {'id': 'CC-BY-NC-2.5', 'deprecated': False}, + 'cc-by-nc-3.0': {'id': 'CC-BY-NC-3.0', 'deprecated': False}, + 'cc-by-nc-3.0-de': {'id': 'CC-BY-NC-3.0-DE', 'deprecated': False}, + 'cc-by-nc-4.0': {'id': 'CC-BY-NC-4.0', 'deprecated': False}, + 'cc-by-nc-nd-1.0': {'id': 'CC-BY-NC-ND-1.0', 'deprecated': False}, + 'cc-by-nc-nd-2.0': {'id': 'CC-BY-NC-ND-2.0', 'deprecated': False}, + 'cc-by-nc-nd-2.5': {'id': 'CC-BY-NC-ND-2.5', 'deprecated': False}, + 'cc-by-nc-nd-3.0': {'id': 'CC-BY-NC-ND-3.0', 'deprecated': False}, + 'cc-by-nc-nd-3.0-de': {'id': 'CC-BY-NC-ND-3.0-DE', 'deprecated': False}, + 'cc-by-nc-nd-3.0-igo': {'id': 'CC-BY-NC-ND-3.0-IGO', 'deprecated': False}, + 'cc-by-nc-nd-4.0': {'id': 'CC-BY-NC-ND-4.0', 'deprecated': False}, + 'cc-by-nc-sa-1.0': {'id': 'CC-BY-NC-SA-1.0', 'deprecated': False}, + 'cc-by-nc-sa-2.0': {'id': 'CC-BY-NC-SA-2.0', 'deprecated': False}, + 'cc-by-nc-sa-2.0-fr': {'id': 'CC-BY-NC-SA-2.0-FR', 'deprecated': False}, + 'cc-by-nc-sa-2.0-uk': {'id': 'CC-BY-NC-SA-2.0-UK', 'deprecated': False}, + 'cc-by-nc-sa-2.5': {'id': 'CC-BY-NC-SA-2.5', 'deprecated': False}, + 'cc-by-nc-sa-3.0': {'id': 'CC-BY-NC-SA-3.0', 'deprecated': False}, + 'cc-by-nc-sa-3.0-de': {'id': 'CC-BY-NC-SA-3.0-DE', 'deprecated': False}, + 'cc-by-nc-sa-3.0-igo': {'id': 'CC-BY-NC-SA-3.0-IGO', 'deprecated': False}, + 'cc-by-nc-sa-4.0': {'id': 'CC-BY-NC-SA-4.0', 'deprecated': False}, + 'cc-by-nd-1.0': {'id': 'CC-BY-ND-1.0', 'deprecated': False}, + 'cc-by-nd-2.0': {'id': 'CC-BY-ND-2.0', 'deprecated': False}, + 'cc-by-nd-2.5': {'id': 'CC-BY-ND-2.5', 'deprecated': False}, + 'cc-by-nd-3.0': {'id': 'CC-BY-ND-3.0', 'deprecated': False}, + 'cc-by-nd-3.0-de': {'id': 'CC-BY-ND-3.0-DE', 'deprecated': False}, + 'cc-by-nd-4.0': {'id': 'CC-BY-ND-4.0', 'deprecated': False}, + 'cc-by-sa-1.0': {'id': 'CC-BY-SA-1.0', 'deprecated': False}, + 'cc-by-sa-2.0': {'id': 'CC-BY-SA-2.0', 'deprecated': False}, + 'cc-by-sa-2.0-uk': {'id': 'CC-BY-SA-2.0-UK', 'deprecated': False}, + 'cc-by-sa-2.1-jp': 
{'id': 'CC-BY-SA-2.1-JP', 'deprecated': False}, + 'cc-by-sa-2.5': {'id': 'CC-BY-SA-2.5', 'deprecated': False}, + 'cc-by-sa-3.0': {'id': 'CC-BY-SA-3.0', 'deprecated': False}, + 'cc-by-sa-3.0-at': {'id': 'CC-BY-SA-3.0-AT', 'deprecated': False}, + 'cc-by-sa-3.0-de': {'id': 'CC-BY-SA-3.0-DE', 'deprecated': False}, + 'cc-by-sa-4.0': {'id': 'CC-BY-SA-4.0', 'deprecated': False}, + 'cc-pddc': {'id': 'CC-PDDC', 'deprecated': False}, + 'cc0-1.0': {'id': 'CC0-1.0', 'deprecated': False}, + 'cddl-1.0': {'id': 'CDDL-1.0', 'deprecated': False}, + 'cddl-1.1': {'id': 'CDDL-1.1', 'deprecated': False}, + 'cdl-1.0': {'id': 'CDL-1.0', 'deprecated': False}, + 'cdla-permissive-1.0': {'id': 'CDLA-Permissive-1.0', 'deprecated': False}, + 'cdla-permissive-2.0': {'id': 'CDLA-Permissive-2.0', 'deprecated': False}, + 'cdla-sharing-1.0': {'id': 'CDLA-Sharing-1.0', 'deprecated': False}, + 'cecill-1.0': {'id': 'CECILL-1.0', 'deprecated': False}, + 'cecill-1.1': {'id': 'CECILL-1.1', 'deprecated': False}, + 'cecill-2.0': {'id': 'CECILL-2.0', 'deprecated': False}, + 'cecill-2.1': {'id': 'CECILL-2.1', 'deprecated': False}, + 'cecill-b': {'id': 'CECILL-B', 'deprecated': False}, + 'cecill-c': {'id': 'CECILL-C', 'deprecated': False}, + 'cern-ohl-1.1': {'id': 'CERN-OHL-1.1', 'deprecated': False}, + 'cern-ohl-1.2': {'id': 'CERN-OHL-1.2', 'deprecated': False}, + 'cern-ohl-p-2.0': {'id': 'CERN-OHL-P-2.0', 'deprecated': False}, + 'cern-ohl-s-2.0': {'id': 'CERN-OHL-S-2.0', 'deprecated': False}, + 'cern-ohl-w-2.0': {'id': 'CERN-OHL-W-2.0', 'deprecated': False}, + 'checkmk': {'id': 'checkmk', 'deprecated': False}, + 'clartistic': {'id': 'ClArtistic', 'deprecated': False}, + 'cnri-jython': {'id': 'CNRI-Jython', 'deprecated': False}, + 'cnri-python': {'id': 'CNRI-Python', 'deprecated': False}, + 'cnri-python-gpl-compatible': {'id': 'CNRI-Python-GPL-Compatible', 'deprecated': False}, + 'coil-1.0': {'id': 'COIL-1.0', 'deprecated': False}, + 'community-spec-1.0': {'id': 'Community-Spec-1.0', 'deprecated': False}, + 
'condor-1.1': {'id': 'Condor-1.1', 'deprecated': False}, + 'copyleft-next-0.3.0': {'id': 'copyleft-next-0.3.0', 'deprecated': False}, + 'copyleft-next-0.3.1': {'id': 'copyleft-next-0.3.1', 'deprecated': False}, + 'cpal-1.0': {'id': 'CPAL-1.0', 'deprecated': False}, + 'cpl-1.0': {'id': 'CPL-1.0', 'deprecated': False}, + 'cpol-1.02': {'id': 'CPOL-1.02', 'deprecated': False}, + 'crossword': {'id': 'Crossword', 'deprecated': False}, + 'crystalstacker': {'id': 'CrystalStacker', 'deprecated': False}, + 'cua-opl-1.0': {'id': 'CUA-OPL-1.0', 'deprecated': False}, + 'cube': {'id': 'Cube', 'deprecated': False}, + 'curl': {'id': 'curl', 'deprecated': False}, + 'd-fsl-1.0': {'id': 'D-FSL-1.0', 'deprecated': False}, + 'diffmark': {'id': 'diffmark', 'deprecated': False}, + 'dl-de-by-2.0': {'id': 'DL-DE-BY-2.0', 'deprecated': False}, + 'doc': {'id': 'DOC', 'deprecated': False}, + 'dotseqn': {'id': 'Dotseqn', 'deprecated': False}, + 'drl-1.0': {'id': 'DRL-1.0', 'deprecated': False}, + 'dsdp': {'id': 'DSDP', 'deprecated': False}, + 'dvipdfm': {'id': 'dvipdfm', 'deprecated': False}, + 'ecl-1.0': {'id': 'ECL-1.0', 'deprecated': False}, + 'ecl-2.0': {'id': 'ECL-2.0', 'deprecated': False}, + 'ecos-2.0': {'id': 'eCos-2.0', 'deprecated': True}, + 'efl-1.0': {'id': 'EFL-1.0', 'deprecated': False}, + 'efl-2.0': {'id': 'EFL-2.0', 'deprecated': False}, + 'egenix': {'id': 'eGenix', 'deprecated': False}, + 'elastic-2.0': {'id': 'Elastic-2.0', 'deprecated': False}, + 'entessa': {'id': 'Entessa', 'deprecated': False}, + 'epics': {'id': 'EPICS', 'deprecated': False}, + 'epl-1.0': {'id': 'EPL-1.0', 'deprecated': False}, + 'epl-2.0': {'id': 'EPL-2.0', 'deprecated': False}, + 'erlpl-1.1': {'id': 'ErlPL-1.1', 'deprecated': False}, + 'etalab-2.0': {'id': 'etalab-2.0', 'deprecated': False}, + 'eudatagrid': {'id': 'EUDatagrid', 'deprecated': False}, + 'eupl-1.0': {'id': 'EUPL-1.0', 'deprecated': False}, + 'eupl-1.1': {'id': 'EUPL-1.1', 'deprecated': False}, + 'eupl-1.2': {'id': 'EUPL-1.2', 'deprecated': 
False}, + 'eurosym': {'id': 'Eurosym', 'deprecated': False}, + 'fair': {'id': 'Fair', 'deprecated': False}, + 'fdk-aac': {'id': 'FDK-AAC', 'deprecated': False}, + 'frameworx-1.0': {'id': 'Frameworx-1.0', 'deprecated': False}, + 'freebsd-doc': {'id': 'FreeBSD-DOC', 'deprecated': False}, + 'freeimage': {'id': 'FreeImage', 'deprecated': False}, + 'fsfap': {'id': 'FSFAP', 'deprecated': False}, + 'fsful': {'id': 'FSFUL', 'deprecated': False}, + 'fsfullr': {'id': 'FSFULLR', 'deprecated': False}, + 'fsfullrwd': {'id': 'FSFULLRWD', 'deprecated': False}, + 'ftl': {'id': 'FTL', 'deprecated': False}, + 'gd': {'id': 'GD', 'deprecated': False}, + 'gfdl-1.1': {'id': 'GFDL-1.1', 'deprecated': True}, + 'gfdl-1.1-invariants-only': {'id': 'GFDL-1.1-invariants-only', 'deprecated': False}, + 'gfdl-1.1-invariants-or-later': {'id': 'GFDL-1.1-invariants-or-later', 'deprecated': False}, + 'gfdl-1.1-no-invariants-only': {'id': 'GFDL-1.1-no-invariants-only', 'deprecated': False}, + 'gfdl-1.1-no-invariants-or-later': {'id': 'GFDL-1.1-no-invariants-or-later', 'deprecated': False}, + 'gfdl-1.1-only': {'id': 'GFDL-1.1-only', 'deprecated': False}, + 'gfdl-1.1-or-later': {'id': 'GFDL-1.1-or-later', 'deprecated': False}, + 'gfdl-1.2': {'id': 'GFDL-1.2', 'deprecated': True}, + 'gfdl-1.2-invariants-only': {'id': 'GFDL-1.2-invariants-only', 'deprecated': False}, + 'gfdl-1.2-invariants-or-later': {'id': 'GFDL-1.2-invariants-or-later', 'deprecated': False}, + 'gfdl-1.2-no-invariants-only': {'id': 'GFDL-1.2-no-invariants-only', 'deprecated': False}, + 'gfdl-1.2-no-invariants-or-later': {'id': 'GFDL-1.2-no-invariants-or-later', 'deprecated': False}, + 'gfdl-1.2-only': {'id': 'GFDL-1.2-only', 'deprecated': False}, + 'gfdl-1.2-or-later': {'id': 'GFDL-1.2-or-later', 'deprecated': False}, + 'gfdl-1.3': {'id': 'GFDL-1.3', 'deprecated': True}, + 'gfdl-1.3-invariants-only': {'id': 'GFDL-1.3-invariants-only', 'deprecated': False}, + 'gfdl-1.3-invariants-or-later': {'id': 'GFDL-1.3-invariants-or-later', 
'deprecated': False}, + 'gfdl-1.3-no-invariants-only': {'id': 'GFDL-1.3-no-invariants-only', 'deprecated': False}, + 'gfdl-1.3-no-invariants-or-later': {'id': 'GFDL-1.3-no-invariants-or-later', 'deprecated': False}, + 'gfdl-1.3-only': {'id': 'GFDL-1.3-only', 'deprecated': False}, + 'gfdl-1.3-or-later': {'id': 'GFDL-1.3-or-later', 'deprecated': False}, + 'giftware': {'id': 'Giftware', 'deprecated': False}, + 'gl2ps': {'id': 'GL2PS', 'deprecated': False}, + 'glide': {'id': 'Glide', 'deprecated': False}, + 'glulxe': {'id': 'Glulxe', 'deprecated': False}, + 'glwtpl': {'id': 'GLWTPL', 'deprecated': False}, + 'gnuplot': {'id': 'gnuplot', 'deprecated': False}, + 'gpl-1.0': {'id': 'GPL-1.0', 'deprecated': True}, + 'gpl-1.0+': {'id': 'GPL-1.0+', 'deprecated': True}, + 'gpl-1.0-only': {'id': 'GPL-1.0-only', 'deprecated': False}, + 'gpl-1.0-or-later': {'id': 'GPL-1.0-or-later', 'deprecated': False}, + 'gpl-2.0': {'id': 'GPL-2.0', 'deprecated': True}, + 'gpl-2.0+': {'id': 'GPL-2.0+', 'deprecated': True}, + 'gpl-2.0-only': {'id': 'GPL-2.0-only', 'deprecated': False}, + 'gpl-2.0-or-later': {'id': 'GPL-2.0-or-later', 'deprecated': False}, + 'gpl-2.0-with-autoconf-exception': {'id': 'GPL-2.0-with-autoconf-exception', 'deprecated': True}, + 'gpl-2.0-with-bison-exception': {'id': 'GPL-2.0-with-bison-exception', 'deprecated': True}, + 'gpl-2.0-with-classpath-exception': {'id': 'GPL-2.0-with-classpath-exception', 'deprecated': True}, + 'gpl-2.0-with-font-exception': {'id': 'GPL-2.0-with-font-exception', 'deprecated': True}, + 'gpl-2.0-with-gcc-exception': {'id': 'GPL-2.0-with-GCC-exception', 'deprecated': True}, + 'gpl-3.0': {'id': 'GPL-3.0', 'deprecated': True}, + 'gpl-3.0+': {'id': 'GPL-3.0+', 'deprecated': True}, + 'gpl-3.0-only': {'id': 'GPL-3.0-only', 'deprecated': False}, + 'gpl-3.0-or-later': {'id': 'GPL-3.0-or-later', 'deprecated': False}, + 'gpl-3.0-with-autoconf-exception': {'id': 'GPL-3.0-with-autoconf-exception', 'deprecated': True}, + 'gpl-3.0-with-gcc-exception': {'id': 
'GPL-3.0-with-GCC-exception', 'deprecated': True}, + 'gsoap-1.3b': {'id': 'gSOAP-1.3b', 'deprecated': False}, + 'haskellreport': {'id': 'HaskellReport', 'deprecated': False}, + 'hippocratic-2.1': {'id': 'Hippocratic-2.1', 'deprecated': False}, + 'hpnd': {'id': 'HPND', 'deprecated': False}, + 'hpnd-sell-variant': {'id': 'HPND-sell-variant', 'deprecated': False}, + 'htmltidy': {'id': 'HTMLTIDY', 'deprecated': False}, + 'ibm-pibs': {'id': 'IBM-pibs', 'deprecated': False}, + 'icu': {'id': 'ICU', 'deprecated': False}, + 'ijg': {'id': 'IJG', 'deprecated': False}, + 'imagemagick': {'id': 'ImageMagick', 'deprecated': False}, + 'imatix': {'id': 'iMatix', 'deprecated': False}, + 'imlib2': {'id': 'Imlib2', 'deprecated': False}, + 'info-zip': {'id': 'Info-ZIP', 'deprecated': False}, + 'intel': {'id': 'Intel', 'deprecated': False}, + 'intel-acpi': {'id': 'Intel-ACPI', 'deprecated': False}, + 'interbase-1.0': {'id': 'Interbase-1.0', 'deprecated': False}, + 'ipa': {'id': 'IPA', 'deprecated': False}, + 'ipl-1.0': {'id': 'IPL-1.0', 'deprecated': False}, + 'isc': {'id': 'ISC', 'deprecated': False}, + 'jam': {'id': 'Jam', 'deprecated': False}, + 'jasper-2.0': {'id': 'JasPer-2.0', 'deprecated': False}, + 'jpnic': {'id': 'JPNIC', 'deprecated': False}, + 'json': {'id': 'JSON', 'deprecated': False}, + 'knuth-ctan': {'id': 'Knuth-CTAN', 'deprecated': False}, + 'lal-1.2': {'id': 'LAL-1.2', 'deprecated': False}, + 'lal-1.3': {'id': 'LAL-1.3', 'deprecated': False}, + 'latex2e': {'id': 'Latex2e', 'deprecated': False}, + 'leptonica': {'id': 'Leptonica', 'deprecated': False}, + 'lgpl-2.0': {'id': 'LGPL-2.0', 'deprecated': True}, + 'lgpl-2.0+': {'id': 'LGPL-2.0+', 'deprecated': True}, + 'lgpl-2.0-only': {'id': 'LGPL-2.0-only', 'deprecated': False}, + 'lgpl-2.0-or-later': {'id': 'LGPL-2.0-or-later', 'deprecated': False}, + 'lgpl-2.1': {'id': 'LGPL-2.1', 'deprecated': True}, + 'lgpl-2.1+': {'id': 'LGPL-2.1+', 'deprecated': True}, + 'lgpl-2.1-only': {'id': 'LGPL-2.1-only', 'deprecated': False}, + 
'lgpl-2.1-or-later': {'id': 'LGPL-2.1-or-later', 'deprecated': False}, + 'lgpl-3.0': {'id': 'LGPL-3.0', 'deprecated': True}, + 'lgpl-3.0+': {'id': 'LGPL-3.0+', 'deprecated': True}, + 'lgpl-3.0-only': {'id': 'LGPL-3.0-only', 'deprecated': False}, + 'lgpl-3.0-or-later': {'id': 'LGPL-3.0-or-later', 'deprecated': False}, + 'lgpllr': {'id': 'LGPLLR', 'deprecated': False}, + 'libpng': {'id': 'Libpng', 'deprecated': False}, + 'libpng-2.0': {'id': 'libpng-2.0', 'deprecated': False}, + 'libselinux-1.0': {'id': 'libselinux-1.0', 'deprecated': False}, + 'libtiff': {'id': 'libtiff', 'deprecated': False}, + 'libutil-david-nugent': {'id': 'libutil-David-Nugent', 'deprecated': False}, + 'liliq-p-1.1': {'id': 'LiLiQ-P-1.1', 'deprecated': False}, + 'liliq-r-1.1': {'id': 'LiLiQ-R-1.1', 'deprecated': False}, + 'liliq-rplus-1.1': {'id': 'LiLiQ-Rplus-1.1', 'deprecated': False}, + 'linux-man-pages-copyleft': {'id': 'Linux-man-pages-copyleft', 'deprecated': False}, + 'linux-openib': {'id': 'Linux-OpenIB', 'deprecated': False}, + 'lpl-1.0': {'id': 'LPL-1.0', 'deprecated': False}, + 'lpl-1.02': {'id': 'LPL-1.02', 'deprecated': False}, + 'lppl-1.0': {'id': 'LPPL-1.0', 'deprecated': False}, + 'lppl-1.1': {'id': 'LPPL-1.1', 'deprecated': False}, + 'lppl-1.2': {'id': 'LPPL-1.2', 'deprecated': False}, + 'lppl-1.3a': {'id': 'LPPL-1.3a', 'deprecated': False}, + 'lppl-1.3c': {'id': 'LPPL-1.3c', 'deprecated': False}, + 'lzma-sdk-9.11-to-9.20': {'id': 'LZMA-SDK-9.11-to-9.20', 'deprecated': False}, + 'lzma-sdk-9.22': {'id': 'LZMA-SDK-9.22', 'deprecated': False}, + 'makeindex': {'id': 'MakeIndex', 'deprecated': False}, + 'minpack': {'id': 'Minpack', 'deprecated': False}, + 'miros': {'id': 'MirOS', 'deprecated': False}, + 'mit': {'id': 'MIT', 'deprecated': False}, + 'mit-0': {'id': 'MIT-0', 'deprecated': False}, + 'mit-advertising': {'id': 'MIT-advertising', 'deprecated': False}, + 'mit-cmu': {'id': 'MIT-CMU', 'deprecated': False}, + 'mit-enna': {'id': 'MIT-enna', 'deprecated': False}, + 'mit-feh': 
{'id': 'MIT-feh', 'deprecated': False}, + 'mit-modern-variant': {'id': 'MIT-Modern-Variant', 'deprecated': False}, + 'mit-open-group': {'id': 'MIT-open-group', 'deprecated': False}, + 'mitnfa': {'id': 'MITNFA', 'deprecated': False}, + 'motosoto': {'id': 'Motosoto', 'deprecated': False}, + 'mpi-permissive': {'id': 'mpi-permissive', 'deprecated': False}, + 'mpich2': {'id': 'mpich2', 'deprecated': False}, + 'mpl-1.0': {'id': 'MPL-1.0', 'deprecated': False}, + 'mpl-1.1': {'id': 'MPL-1.1', 'deprecated': False}, + 'mpl-2.0': {'id': 'MPL-2.0', 'deprecated': False}, + 'mpl-2.0-no-copyleft-exception': {'id': 'MPL-2.0-no-copyleft-exception', 'deprecated': False}, + 'mplus': {'id': 'mplus', 'deprecated': False}, + 'ms-lpl': {'id': 'MS-LPL', 'deprecated': False}, + 'ms-pl': {'id': 'MS-PL', 'deprecated': False}, + 'ms-rl': {'id': 'MS-RL', 'deprecated': False}, + 'mtll': {'id': 'MTLL', 'deprecated': False}, + 'mulanpsl-1.0': {'id': 'MulanPSL-1.0', 'deprecated': False}, + 'mulanpsl-2.0': {'id': 'MulanPSL-2.0', 'deprecated': False}, + 'multics': {'id': 'Multics', 'deprecated': False}, + 'mup': {'id': 'Mup', 'deprecated': False}, + 'naist-2003': {'id': 'NAIST-2003', 'deprecated': False}, + 'nasa-1.3': {'id': 'NASA-1.3', 'deprecated': False}, + 'naumen': {'id': 'Naumen', 'deprecated': False}, + 'nbpl-1.0': {'id': 'NBPL-1.0', 'deprecated': False}, + 'ncgl-uk-2.0': {'id': 'NCGL-UK-2.0', 'deprecated': False}, + 'ncsa': {'id': 'NCSA', 'deprecated': False}, + 'net-snmp': {'id': 'Net-SNMP', 'deprecated': False}, + 'netcdf': {'id': 'NetCDF', 'deprecated': False}, + 'newsletr': {'id': 'Newsletr', 'deprecated': False}, + 'ngpl': {'id': 'NGPL', 'deprecated': False}, + 'nicta-1.0': {'id': 'NICTA-1.0', 'deprecated': False}, + 'nist-pd': {'id': 'NIST-PD', 'deprecated': False}, + 'nist-pd-fallback': {'id': 'NIST-PD-fallback', 'deprecated': False}, + 'nlod-1.0': {'id': 'NLOD-1.0', 'deprecated': False}, + 'nlod-2.0': {'id': 'NLOD-2.0', 'deprecated': False}, + 'nlpl': {'id': 'NLPL', 'deprecated': 
False}, + 'nokia': {'id': 'Nokia', 'deprecated': False}, + 'nosl': {'id': 'NOSL', 'deprecated': False}, + 'noweb': {'id': 'Noweb', 'deprecated': False}, + 'npl-1.0': {'id': 'NPL-1.0', 'deprecated': False}, + 'npl-1.1': {'id': 'NPL-1.1', 'deprecated': False}, + 'nposl-3.0': {'id': 'NPOSL-3.0', 'deprecated': False}, + 'nrl': {'id': 'NRL', 'deprecated': False}, + 'ntp': {'id': 'NTP', 'deprecated': False}, + 'ntp-0': {'id': 'NTP-0', 'deprecated': False}, + 'nunit': {'id': 'Nunit', 'deprecated': True}, + 'o-uda-1.0': {'id': 'O-UDA-1.0', 'deprecated': False}, + 'occt-pl': {'id': 'OCCT-PL', 'deprecated': False}, + 'oclc-2.0': {'id': 'OCLC-2.0', 'deprecated': False}, + 'odbl-1.0': {'id': 'ODbL-1.0', 'deprecated': False}, + 'odc-by-1.0': {'id': 'ODC-By-1.0', 'deprecated': False}, + 'ofl-1.0': {'id': 'OFL-1.0', 'deprecated': False}, + 'ofl-1.0-no-rfn': {'id': 'OFL-1.0-no-RFN', 'deprecated': False}, + 'ofl-1.0-rfn': {'id': 'OFL-1.0-RFN', 'deprecated': False}, + 'ofl-1.1': {'id': 'OFL-1.1', 'deprecated': False}, + 'ofl-1.1-no-rfn': {'id': 'OFL-1.1-no-RFN', 'deprecated': False}, + 'ofl-1.1-rfn': {'id': 'OFL-1.1-RFN', 'deprecated': False}, + 'ogc-1.0': {'id': 'OGC-1.0', 'deprecated': False}, + 'ogdl-taiwan-1.0': {'id': 'OGDL-Taiwan-1.0', 'deprecated': False}, + 'ogl-canada-2.0': {'id': 'OGL-Canada-2.0', 'deprecated': False}, + 'ogl-uk-1.0': {'id': 'OGL-UK-1.0', 'deprecated': False}, + 'ogl-uk-2.0': {'id': 'OGL-UK-2.0', 'deprecated': False}, + 'ogl-uk-3.0': {'id': 'OGL-UK-3.0', 'deprecated': False}, + 'ogtsl': {'id': 'OGTSL', 'deprecated': False}, + 'oldap-1.1': {'id': 'OLDAP-1.1', 'deprecated': False}, + 'oldap-1.2': {'id': 'OLDAP-1.2', 'deprecated': False}, + 'oldap-1.3': {'id': 'OLDAP-1.3', 'deprecated': False}, + 'oldap-1.4': {'id': 'OLDAP-1.4', 'deprecated': False}, + 'oldap-2.0': {'id': 'OLDAP-2.0', 'deprecated': False}, + 'oldap-2.0.1': {'id': 'OLDAP-2.0.1', 'deprecated': False}, + 'oldap-2.1': {'id': 'OLDAP-2.1', 'deprecated': False}, + 'oldap-2.2': {'id': 'OLDAP-2.2', 
'deprecated': False}, + 'oldap-2.2.1': {'id': 'OLDAP-2.2.1', 'deprecated': False}, + 'oldap-2.2.2': {'id': 'OLDAP-2.2.2', 'deprecated': False}, + 'oldap-2.3': {'id': 'OLDAP-2.3', 'deprecated': False}, + 'oldap-2.4': {'id': 'OLDAP-2.4', 'deprecated': False}, + 'oldap-2.5': {'id': 'OLDAP-2.5', 'deprecated': False}, + 'oldap-2.6': {'id': 'OLDAP-2.6', 'deprecated': False}, + 'oldap-2.7': {'id': 'OLDAP-2.7', 'deprecated': False}, + 'oldap-2.8': {'id': 'OLDAP-2.8', 'deprecated': False}, + 'oml': {'id': 'OML', 'deprecated': False}, + 'openssl': {'id': 'OpenSSL', 'deprecated': False}, + 'opl-1.0': {'id': 'OPL-1.0', 'deprecated': False}, + 'opubl-1.0': {'id': 'OPUBL-1.0', 'deprecated': False}, + 'oset-pl-2.1': {'id': 'OSET-PL-2.1', 'deprecated': False}, + 'osl-1.0': {'id': 'OSL-1.0', 'deprecated': False}, + 'osl-1.1': {'id': 'OSL-1.1', 'deprecated': False}, + 'osl-2.0': {'id': 'OSL-2.0', 'deprecated': False}, + 'osl-2.1': {'id': 'OSL-2.1', 'deprecated': False}, + 'osl-3.0': {'id': 'OSL-3.0', 'deprecated': False}, + 'parity-6.0.0': {'id': 'Parity-6.0.0', 'deprecated': False}, + 'parity-7.0.0': {'id': 'Parity-7.0.0', 'deprecated': False}, + 'pddl-1.0': {'id': 'PDDL-1.0', 'deprecated': False}, + 'php-3.0': {'id': 'PHP-3.0', 'deprecated': False}, + 'php-3.01': {'id': 'PHP-3.01', 'deprecated': False}, + 'plexus': {'id': 'Plexus', 'deprecated': False}, + 'polyform-noncommercial-1.0.0': {'id': 'PolyForm-Noncommercial-1.0.0', 'deprecated': False}, + 'polyform-small-business-1.0.0': {'id': 'PolyForm-Small-Business-1.0.0', 'deprecated': False}, + 'postgresql': {'id': 'PostgreSQL', 'deprecated': False}, + 'psf-2.0': {'id': 'PSF-2.0', 'deprecated': False}, + 'psfrag': {'id': 'psfrag', 'deprecated': False}, + 'psutils': {'id': 'psutils', 'deprecated': False}, + 'python-2.0': {'id': 'Python-2.0', 'deprecated': False}, + 'python-2.0.1': {'id': 'Python-2.0.1', 'deprecated': False}, + 'qhull': {'id': 'Qhull', 'deprecated': False}, + 'qpl-1.0': {'id': 'QPL-1.0', 'deprecated': False}, + 
'rdisc': {'id': 'Rdisc', 'deprecated': False}, + 'rhecos-1.1': {'id': 'RHeCos-1.1', 'deprecated': False}, + 'rpl-1.1': {'id': 'RPL-1.1', 'deprecated': False}, + 'rpl-1.5': {'id': 'RPL-1.5', 'deprecated': False}, + 'rpsl-1.0': {'id': 'RPSL-1.0', 'deprecated': False}, + 'rsa-md': {'id': 'RSA-MD', 'deprecated': False}, + 'rscpl': {'id': 'RSCPL', 'deprecated': False}, + 'ruby': {'id': 'Ruby', 'deprecated': False}, + 'sax-pd': {'id': 'SAX-PD', 'deprecated': False}, + 'saxpath': {'id': 'Saxpath', 'deprecated': False}, + 'scea': {'id': 'SCEA', 'deprecated': False}, + 'schemereport': {'id': 'SchemeReport', 'deprecated': False}, + 'sendmail': {'id': 'Sendmail', 'deprecated': False}, + 'sendmail-8.23': {'id': 'Sendmail-8.23', 'deprecated': False}, + 'sgi-b-1.0': {'id': 'SGI-B-1.0', 'deprecated': False}, + 'sgi-b-1.1': {'id': 'SGI-B-1.1', 'deprecated': False}, + 'sgi-b-2.0': {'id': 'SGI-B-2.0', 'deprecated': False}, + 'shl-0.5': {'id': 'SHL-0.5', 'deprecated': False}, + 'shl-0.51': {'id': 'SHL-0.51', 'deprecated': False}, + 'simpl-2.0': {'id': 'SimPL-2.0', 'deprecated': False}, + 'sissl': {'id': 'SISSL', 'deprecated': False}, + 'sissl-1.2': {'id': 'SISSL-1.2', 'deprecated': False}, + 'sleepycat': {'id': 'Sleepycat', 'deprecated': False}, + 'smlnj': {'id': 'SMLNJ', 'deprecated': False}, + 'smppl': {'id': 'SMPPL', 'deprecated': False}, + 'snia': {'id': 'SNIA', 'deprecated': False}, + 'spencer-86': {'id': 'Spencer-86', 'deprecated': False}, + 'spencer-94': {'id': 'Spencer-94', 'deprecated': False}, + 'spencer-99': {'id': 'Spencer-99', 'deprecated': False}, + 'spl-1.0': {'id': 'SPL-1.0', 'deprecated': False}, + 'ssh-openssh': {'id': 'SSH-OpenSSH', 'deprecated': False}, + 'ssh-short': {'id': 'SSH-short', 'deprecated': False}, + 'sspl-1.0': {'id': 'SSPL-1.0', 'deprecated': False}, + 'standardml-nj': {'id': 'StandardML-NJ', 'deprecated': True}, + 'sugarcrm-1.1.3': {'id': 'SugarCRM-1.1.3', 'deprecated': False}, + 'swl': {'id': 'SWL', 'deprecated': False}, + 'tapr-ohl-1.0': {'id': 
'TAPR-OHL-1.0', 'deprecated': False}, + 'tcl': {'id': 'TCL', 'deprecated': False}, + 'tcp-wrappers': {'id': 'TCP-wrappers', 'deprecated': False}, + 'tmate': {'id': 'TMate', 'deprecated': False}, + 'torque-1.1': {'id': 'TORQUE-1.1', 'deprecated': False}, + 'tosl': {'id': 'TOSL', 'deprecated': False}, + 'tu-berlin-1.0': {'id': 'TU-Berlin-1.0', 'deprecated': False}, + 'tu-berlin-2.0': {'id': 'TU-Berlin-2.0', 'deprecated': False}, + 'ucl-1.0': {'id': 'UCL-1.0', 'deprecated': False}, + 'unicode-dfs-2015': {'id': 'Unicode-DFS-2015', 'deprecated': False}, + 'unicode-dfs-2016': {'id': 'Unicode-DFS-2016', 'deprecated': False}, + 'unicode-tou': {'id': 'Unicode-TOU', 'deprecated': False}, + 'unlicense': {'id': 'Unlicense', 'deprecated': False}, + 'upl-1.0': {'id': 'UPL-1.0', 'deprecated': False}, + 'vim': {'id': 'Vim', 'deprecated': False}, + 'vostrom': {'id': 'VOSTROM', 'deprecated': False}, + 'vsl-1.0': {'id': 'VSL-1.0', 'deprecated': False}, + 'w3c': {'id': 'W3C', 'deprecated': False}, + 'w3c-19980720': {'id': 'W3C-19980720', 'deprecated': False}, + 'w3c-20150513': {'id': 'W3C-20150513', 'deprecated': False}, + 'watcom-1.0': {'id': 'Watcom-1.0', 'deprecated': False}, + 'wsuipa': {'id': 'Wsuipa', 'deprecated': False}, + 'wtfpl': {'id': 'WTFPL', 'deprecated': False}, + 'wxwindows': {'id': 'wxWindows', 'deprecated': True}, + 'x11': {'id': 'X11', 'deprecated': False}, + 'x11-distribute-modifications-variant': {'id': 'X11-distribute-modifications-variant', 'deprecated': False}, + 'xerox': {'id': 'Xerox', 'deprecated': False}, + 'xfree86-1.1': {'id': 'XFree86-1.1', 'deprecated': False}, + 'xinetd': {'id': 'xinetd', 'deprecated': False}, + 'xnet': {'id': 'Xnet', 'deprecated': False}, + 'xpp': {'id': 'xpp', 'deprecated': False}, + 'xskat': {'id': 'XSkat', 'deprecated': False}, + 'ypl-1.0': {'id': 'YPL-1.0', 'deprecated': False}, + 'ypl-1.1': {'id': 'YPL-1.1', 'deprecated': False}, + 'zed': {'id': 'Zed', 'deprecated': False}, + 'zend-2.0': {'id': 'Zend-2.0', 'deprecated': False}, + 
'zimbra-1.3': {'id': 'Zimbra-1.3', 'deprecated': False}, + 'zimbra-1.4': {'id': 'Zimbra-1.4', 'deprecated': False}, + 'zlib': {'id': 'Zlib', 'deprecated': False}, + 'zlib-acknowledgement': {'id': 'zlib-acknowledgement', 'deprecated': False}, + 'zpl-1.1': {'id': 'ZPL-1.1', 'deprecated': False}, + 'zpl-2.0': {'id': 'ZPL-2.0', 'deprecated': False}, + 'zpl-2.1': {'id': 'ZPL-2.1', 'deprecated': False}, +} + +EXCEPTIONS: dict[str, dict[str, str | bool]] = { + '389-exception': {'id': '389-exception', 'deprecated': False}, + 'autoconf-exception-2.0': {'id': 'Autoconf-exception-2.0', 'deprecated': False}, + 'autoconf-exception-3.0': {'id': 'Autoconf-exception-3.0', 'deprecated': False}, + 'bison-exception-2.2': {'id': 'Bison-exception-2.2', 'deprecated': False}, + 'bootloader-exception': {'id': 'Bootloader-exception', 'deprecated': False}, + 'classpath-exception-2.0': {'id': 'Classpath-exception-2.0', 'deprecated': False}, + 'clisp-exception-2.0': {'id': 'CLISP-exception-2.0', 'deprecated': False}, + 'digirule-foss-exception': {'id': 'DigiRule-FOSS-exception', 'deprecated': False}, + 'ecos-exception-2.0': {'id': 'eCos-exception-2.0', 'deprecated': False}, + 'fawkes-runtime-exception': {'id': 'Fawkes-Runtime-exception', 'deprecated': False}, + 'fltk-exception': {'id': 'FLTK-exception', 'deprecated': False}, + 'font-exception-2.0': {'id': 'Font-exception-2.0', 'deprecated': False}, + 'freertos-exception-2.0': {'id': 'freertos-exception-2.0', 'deprecated': False}, + 'gcc-exception-2.0': {'id': 'GCC-exception-2.0', 'deprecated': False}, + 'gcc-exception-3.1': {'id': 'GCC-exception-3.1', 'deprecated': False}, + 'gnu-javamail-exception': {'id': 'gnu-javamail-exception', 'deprecated': False}, + 'gpl-3.0-linking-exception': {'id': 'GPL-3.0-linking-exception', 'deprecated': False}, + 'gpl-3.0-linking-source-exception': {'id': 'GPL-3.0-linking-source-exception', 'deprecated': False}, + 'gpl-cc-1.0': {'id': 'GPL-CC-1.0', 'deprecated': False}, + 'gstreamer-exception-2005': {'id': 
'GStreamer-exception-2005', 'deprecated': False}, + 'gstreamer-exception-2008': {'id': 'GStreamer-exception-2008', 'deprecated': False}, + 'i2p-gpl-java-exception': {'id': 'i2p-gpl-java-exception', 'deprecated': False}, + 'kicad-libraries-exception': {'id': 'KiCad-libraries-exception', 'deprecated': False}, + 'lgpl-3.0-linking-exception': {'id': 'LGPL-3.0-linking-exception', 'deprecated': False}, + 'libtool-exception': {'id': 'Libtool-exception', 'deprecated': False}, + 'linux-syscall-note': {'id': 'Linux-syscall-note', 'deprecated': False}, + 'llvm-exception': {'id': 'LLVM-exception', 'deprecated': False}, + 'lzma-exception': {'id': 'LZMA-exception', 'deprecated': False}, + 'mif-exception': {'id': 'mif-exception', 'deprecated': False}, + 'nokia-qt-exception-1.1': {'id': 'Nokia-Qt-exception-1.1', 'deprecated': True}, + 'ocaml-lgpl-linking-exception': {'id': 'OCaml-LGPL-linking-exception', 'deprecated': False}, + 'occt-exception-1.0': {'id': 'OCCT-exception-1.0', 'deprecated': False}, + 'openjdk-assembly-exception-1.0': {'id': 'OpenJDK-assembly-exception-1.0', 'deprecated': False}, + 'openvpn-openssl-exception': {'id': 'openvpn-openssl-exception', 'deprecated': False}, + 'ps-or-pdf-font-exception-20170817': {'id': 'PS-or-PDF-font-exception-20170817', 'deprecated': False}, + 'qt-gpl-exception-1.0': {'id': 'Qt-GPL-exception-1.0', 'deprecated': False}, + 'qt-lgpl-exception-1.1': {'id': 'Qt-LGPL-exception-1.1', 'deprecated': False}, + 'qwt-exception-1.0': {'id': 'Qwt-exception-1.0', 'deprecated': False}, + 'shl-2.0': {'id': 'SHL-2.0', 'deprecated': False}, + 'shl-2.1': {'id': 'SHL-2.1', 'deprecated': False}, + 'swift-exception': {'id': 'Swift-exception', 'deprecated': False}, + 'u-boot-exception-2.0': {'id': 'u-boot-exception-2.0', 'deprecated': False}, + 'universal-foss-exception-1.0': {'id': 'Universal-FOSS-exception-1.0', 'deprecated': False}, + 'wxwindows-exception-3.1': {'id': 'WxWindows-exception-3.1', 'deprecated': False}, + 'x11vnc-openssl-exception': {'id': 
'x11vnc-openssl-exception', 'deprecated': False}, +} diff --git a/src/hatchling/metadata/__init__.py b/src/hatchling/metadata/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/hatchling/metadata/core.py b/src/hatchling/metadata/core.py new file mode 100644 index 0000000..1de704a --- /dev/null +++ b/src/hatchling/metadata/core.py @@ -0,0 +1,1561 @@ +from __future__ import annotations + +import os +import sys +from copy import deepcopy +from typing import TYPE_CHECKING, Any, Generic, cast + +from hatchling.metadata.utils import get_normalized_dependency, is_valid_project_name, normalize_project_name +from hatchling.plugin.manager import PluginManagerBound +from hatchling.utils.constants import DEFAULT_CONFIG_FILE +from hatchling.utils.fs import locate_file + +if TYPE_CHECKING: + from packaging.requirements import Requirement + from packaging.specifiers import SpecifierSet + + from hatchling.metadata.plugin.interface import MetadataHookInterface + from hatchling.utils.context import Context + from hatchling.version.scheme.plugin.interface import VersionSchemeInterface + from hatchling.version.source.plugin.interface import VersionSourceInterface + +if sys.version_info >= (3, 11): + import tomllib +else: + import tomli as tomllib + + +def load_toml(path: str) -> dict[str, Any]: + with open(path, encoding='utf-8') as f: + return tomllib.loads(f.read()) + + +class ProjectMetadata(Generic[PluginManagerBound]): + def __init__( + self, + root: str, + plugin_manager: PluginManagerBound | None, + config: dict[str, Any] | None = None, + ) -> None: + self.root = root + self.plugin_manager = plugin_manager + self._config = config + + self._context: Context | None = None + self._build: BuildMetadata | None = None + self._core: CoreMetadata | None = None + self._hatch: HatchMetadata | None = None + + self._core_raw_metadata: dict[str, Any] | None = None + self._dynamic: list[str] | None = None + self._name: str | None = None + self._version: str | None = 
class ProjectMetadata(Generic[PluginManagerBound]):
    """Lazily-loaded view of a project's `pyproject.toml` metadata.

    Exposes the PEP 621 `project` table (`core`), the PEP 517 `build-system`
    table (`build`), and the `tool.hatch` settings (`hatch`), each parsed and
    cached on first access. When `config` is supplied the caller has already
    loaded `pyproject.toml`; otherwise it is located and read lazily.
    """

    def __init__(
        self,
        root: str,
        plugin_manager: PluginManagerBound | None,
        config: dict[str, Any] | None = None,
    ) -> None:
        self.root = root
        self.plugin_manager = plugin_manager
        self._config = config

        # Lazy caches, populated on first access of the matching property.
        self._context: Context | None = None
        self._build: BuildMetadata | None = None
        self._core: CoreMetadata | None = None
        self._hatch: HatchMetadata | None = None

        self._core_raw_metadata: dict[str, Any] | None = None
        self._dynamic: list[str] | None = None
        self._name: str | None = None
        self._version: str | None = None
        self._project_file: str | None = None

        # App already loaded config
        if config is not None and root is not None:
            self._project_file = os.path.join(root, 'pyproject.toml')

    def has_project_file(self) -> bool:
        """Return whether a `pyproject.toml` file exists for this project."""
        # Trigger lazy config loading, which may discover the project file path.
        _ = self.config
        if self._project_file is None:
            return False
        return os.path.isfile(self._project_file)

    @property
    def context(self) -> Context:
        """A `Context` rooted at the project directory, created on first use."""
        if self._context is None:
            from hatchling.utils.context import Context

            self._context = Context(self.root)

        return self._context

    @property
    def core_raw_metadata(self) -> dict[str, Any]:
        """The raw `project` table, validated to exist and to be a table."""
        if self._core_raw_metadata is None:
            if 'project' not in self.config:
                message = 'Missing `project` metadata table in configuration'
                raise ValueError(message)

            core_raw_metadata = self.config['project']
            if not isinstance(core_raw_metadata, dict):
                message = 'The `project` configuration must be a table'
                raise TypeError(message)

            self._core_raw_metadata = core_raw_metadata

        return self._core_raw_metadata

    @property
    def dynamic(self) -> list[str]:
        """A copy of the original `project.dynamic` list of field names."""
        # Keep track of the original dynamic fields before depopulation
        if self._dynamic is None:
            dynamic = self.core_raw_metadata.get('dynamic', [])
            if not isinstance(dynamic, list):
                message = 'Field `project.dynamic` must be an array'
                raise TypeError(message)

            for i, field in enumerate(dynamic, 1):
                if not isinstance(field, str):
                    message = f'Field #{i} of field `project.dynamic` must be a string'
                    raise TypeError(message)

            self._dynamic = list(dynamic)

        return self._dynamic

    @property
    def name(self) -> str:
        """The normalized project name from `project.name`."""
        # Duplicate the name parsing here for situations where it's
        # needed but metadata plugins might not be available
        if self._name is None:
            name = self.core_raw_metadata.get('name', '')
            if not name:
                message = 'Missing required field `project.name`'
                raise ValueError(message)

            self._name = normalize_project_name(name)

        return self._name

    @property
    def version(self) -> str:
        """
        The resolved, PEP 440-normalized project version.

        https://peps.python.org/pep-0621/#version
        """
        if self._version is None:
            self._version = self._get_version()
            # Once resolved, `version` is no longer considered dynamic.
            if 'version' in self.dynamic and 'version' in self.core_raw_metadata['dynamic']:
                self.core_raw_metadata['dynamic'].remove('version')

        return self._version

    @property
    def config(self) -> dict[str, Any]:
        """Parsed `pyproject.toml` contents, or an empty dict if none is found."""
        if self._config is None:
            project_file = locate_file(self.root, 'pyproject.toml')
            if project_file is None:
                self._config = {}
            else:
                self._project_file = project_file
                self._config = load_toml(project_file)

        return self._config

    @property
    def build(self) -> BuildMetadata:
        """Accessor for the PEP 517 `build-system` table."""
        if self._build is None:
            build_metadata = self.config.get('build-system', {})
            if not isinstance(build_metadata, dict):
                message = 'The `build-system` configuration must be a table'
                raise TypeError(message)

            self._build = BuildMetadata(self.root, build_metadata)

        return self._build

    @property
    def core(self) -> CoreMetadata:
        """The PEP 621 metadata, with any metadata hooks already applied.

        Hooks may mutate the raw `project` table in place; every field a hook
        adds must have been declared in `project.dynamic`.
        """
        if self._core is None:
            metadata = CoreMetadata(self.root, self.core_raw_metadata, self.hatch.metadata, self.context)

            # Save the fields
            _ = self.dynamic

            metadata_hooks = self.hatch.metadata.hooks
            if metadata_hooks:
                static_fields = set(self.core_raw_metadata)
                # Resolve the version eagerly so hooks see a concrete value.
                if 'version' in self.hatch.config:
                    self._version = self._get_version(metadata)
                    self.core_raw_metadata['version'] = self.version

                for metadata_hook in metadata_hooks.values():
                    metadata_hook.update(self.core_raw_metadata)
                    metadata.add_known_classifiers(metadata_hook.get_known_classifiers())

                # Any field a hook introduced must have been declared dynamic.
                new_fields = set(self.core_raw_metadata) - static_fields
                for new_field in new_fields:
                    if new_field in metadata.dynamic:
                        metadata.dynamic.remove(new_field)
                    else:
                        message = (
                            f'The field `{new_field}` was set dynamically and therefore must be '
                            f'listed in `project.dynamic`'
                        )
                        raise ValueError(message)

            self._core = metadata

        return self._core

    @property
    def hatch(self) -> HatchMetadata:
        """The `tool.hatch` settings, merged with `hatch.toml` when present."""
        if self._hatch is None:
            tool_config = self.config.get('tool', {})
            if not isinstance(tool_config, dict):
                message = 'The `tool` configuration must be a table'
                raise TypeError(message)

            hatch_config = tool_config.get('hatch', {})
            if not isinstance(hatch_config, dict):
                message = 'The `tool.hatch` configuration must be a table'
                raise TypeError(message)

            hatch_file = (
                os.path.join(os.path.dirname(self._project_file), DEFAULT_CONFIG_FILE)
                if self._project_file is not None
                else locate_file(self.root, DEFAULT_CONFIG_FILE) or ''
            )

            # A standalone config file overrides keys from `tool.hatch`.
            if hatch_file and os.path.isfile(hatch_file):
                config = load_toml(hatch_file)
                hatch_config = hatch_config.copy()
                hatch_config.update(config)

            self._hatch = HatchMetadata(self.root, hatch_config, self.plugin_manager)

        return self._hatch

    def _get_version(self, core_metadata: CoreMetadata | None = None) -> str:
        """Resolve the version statically or from the configured source.

        Raises ValueError if the result is not a valid PEP 440 version;
        otherwise returns its normalized string form.
        """
        if core_metadata is None:
            core_metadata = self.core

        version = core_metadata.version
        if version is None:
            version = self.hatch.version.cached
            source = f'source `{self.hatch.version.source_name}`'
            core_metadata._version_set = True  # noqa: SLF001
        else:
            source = 'field `project.version`'

        from packaging.version import InvalidVersion, Version

        try:
            normalized_version = str(Version(version))
        except InvalidVersion:
            message = f'Invalid version `{version}` from {source}, see https://peps.python.org/pep-0440/'
            raise ValueError(message) from None
        else:
            return normalized_version

    def validate_fields(self) -> None:
        """Force resolution of the version and all core metadata fields."""
        _ = self.version
        self.core.validate_fields()
class BuildMetadata:
    """Accessor for the PEP 517 `build-system` table of a project's config.

    https://peps.python.org/pep-0517/
    """

    def __init__(self, root: str, config: dict[str, str | list[str]]) -> None:
        self.root = root
        self.config = config

        # Lazy caches, populated on first access of the matching property.
        self._requires: list[str] | None = None
        self._requires_complex: list[Requirement] | None = None
        self._build_backend: str | None = None
        self._backend_path: list[str] | None = None

    @property
    def requires_complex(self) -> list[Requirement]:
        """The `build-system.requires` entries parsed as `Requirement`s."""
        if self._requires_complex is not None:
            return self._requires_complex

        from packaging.requirements import InvalidRequirement, Requirement

        raw_requires = self.config.get('requires', [])
        if not isinstance(raw_requires, list):
            message = 'Field `build-system.requires` must be an array'
            raise TypeError(message)

        parsed = []
        for i, dependency in enumerate(raw_requires, 1):
            if not isinstance(dependency, str):
                message = f'Dependency #{i} of field `build-system.requires` must be a string'
                raise TypeError(message)

            try:
                parsed.append(Requirement(dependency))
            except InvalidRequirement as e:
                message = f'Dependency #{i} of field `build-system.requires` is invalid: {e}'
                raise ValueError(message) from None

        self._requires_complex = parsed
        return self._requires_complex

    @property
    def requires(self) -> list[str]:
        """String form of `requires_complex`, computed once."""
        if self._requires is None:
            self._requires = [str(dependency) for dependency in self.requires_complex]

        return self._requires

    @property
    def build_backend(self) -> str:
        """The `build-system.build-backend` value; empty string when absent."""
        if self._build_backend is not None:
            return self._build_backend

        backend = self.config.get('build-backend', '')
        if not isinstance(backend, str):
            message = 'Field `build-system.build-backend` must be a string'
            raise TypeError(message)

        self._build_backend = backend
        return self._build_backend

    @property
    def backend_path(self) -> list[str]:
        """The `build-system.backend-path` entries; empty list when absent."""
        if self._backend_path is not None:
            return self._backend_path

        paths = self.config.get('backend-path', [])
        if not isinstance(paths, list):
            message = 'Field `build-system.backend-path` must be an array'
            raise TypeError(message)

        for i, entry in enumerate(paths, 1):
            if not isinstance(entry, str):
                message = f'Entry #{i} of field `build-system.backend-path` must be a string'
                raise TypeError(message)

        self._backend_path = paths
        return self._backend_path
    def __init__(
        self,
        root: str,
        config: dict[str, Any],
        hatch_metadata: HatchMetadataSettings,
        context: Context,
    ) -> None:
        """Store the raw `project` table and context; parsed fields start unset.

        Every `_`-prefixed attribute below is a lazy cache that the matching
        property fills in on first access.
        """
        self.root = root
        self.config = config
        self.hatch_metadata = hatch_metadata
        self.context = context

        # Lazy caches for the PEP 621 fields, populated on first access.
        self._raw_name: str | None = None
        self._name: str | None = None
        self._version: str | None = None
        self._description: str | None = None
        self._readme: str | None = None
        self._readme_content_type: str | None = None
        self._readme_path: str | None = None
        self._requires_python: str | None = None
        self._python_constraint: SpecifierSet | None = None
        self._license: str | None = None
        self._license_expression: str | None = None
        self._license_files: list[str] | None = None
        self._authors: list[str] | None = None
        self._authors_data: dict[str, list[str]] | None = None
        self._maintainers: list[str] | None = None
        self._maintainers_data: dict[str, list[str]] | None = None
        self._keywords: list[str] | None = None
        self._classifiers: list[str] | None = None
        self._extra_classifiers: set[str] = set()
        self._urls: dict[str, str] | None = None
        self._scripts: dict[str, str] | None = None
        self._gui_scripts: dict[str, str] | None = None
        self._entry_points: dict[str, dict[str, str]] | None = None
        self._dependencies_complex: dict[str, Requirement] | None = None
        self._dependencies: list[str] | None = None
        self._optional_dependencies_complex: dict[str, dict[str, Requirement]] | None = None
        self._optional_dependencies: dict[str, list[str]] | None = None
        self._dynamic: list[str] | None = None

        # Indicates that the version has been successfully set dynamically
        self._version_set: bool = False
    @property
    def raw_name(self) -> str:
        """
        The project name exactly as written in `project.name`, after validation.

        https://peps.python.org/pep-0621/#name
        """
        if self._raw_name is None:
            if 'name' in self.dynamic:
                message = 'Static metadata field `name` cannot be present in field `project.dynamic`'
                raise ValueError(message)

            raw_name = self.config.get('name', '')
            if not raw_name:
                message = 'Missing required field `project.name`'
                raise ValueError(message)

            if not isinstance(raw_name, str):
                message = 'Field `project.name` must be a string'
                raise TypeError(message)

            if not is_valid_project_name(raw_name):
                message = (
                    'Required field `project.name` must only contain ASCII letters/digits, underscores, '
                    'hyphens, and periods, and must begin and end with ASCII letters/digits.'
                )
                raise ValueError(message)

            self._raw_name = raw_name

        return self._raw_name

    @property
    def name(self) -> str:
        """
        The normalized form of `raw_name`.

        https://peps.python.org/pep-0621/#name
        """
        if self._name is None:
            self._name = normalize_project_name(self.raw_name)

        return self._name

    @property
    def version(self) -> str:
        """
        The statically defined `project.version`, if any.

        Returns None (via the cache) until `_version_set` is flipped by the
        dynamic resolution path in `ProjectMetadata._get_version`.

        https://peps.python.org/pep-0621/#version
        """
        version: str

        if self._version is None:
            if 'version' not in self.config:
                # Without a static value, the version may only come from a
                # dynamic source declared in `project.dynamic`.
                if not self._version_set and 'version' not in self.dynamic:
                    message = (
                        'Field `project.version` can only be resolved dynamically '
                        'if `version` is in field `project.dynamic`'
                    )
                    raise ValueError(message)
            else:
                if 'version' in self.dynamic:
                    message = (
                        'Metadata field `version` cannot be both statically defined and '
                        'listed in field `project.dynamic`'
                    )
                    raise ValueError(message)

                version = self.config['version']
                if not isinstance(version, str):
                    message = 'Field `project.version` must be a string'
                    raise TypeError(message)

                self._version = version

        return cast(str, self._version)

    @property
    def description(self) -> str:
        """
        The project summary, normalized to a single line.

        https://peps.python.org/pep-0621/#description
        """
        if self._description is None:
            if 'description' in self.config:
                description = self.config['description']
                if 'description' in self.dynamic:
                    message = (
                        'Metadata field `description` cannot be both statically defined and '
                        'listed in field `project.dynamic`'
                    )
                    raise ValueError(message)
            else:
                description = ''

            if not isinstance(description, str):
                message = 'Field `project.description` must be a string'
                raise TypeError(message)

            # Collapse any newlines so the description is a single line.
            self._description = ' '.join(description.splitlines())

        return self._description
    @property
    def readme(self) -> str:
        """
        The readme contents, read from a file or taken inline.

        Accepts either a path string (content type inferred from the
        extension) or a table with `content-type` plus `file` or `text`.
        Also fills in `_readme_content_type` and `_readme_path` as a side
        effect.

        https://peps.python.org/pep-0621/#readme
        """
        readme: str | dict[str, str] | None
        content_type: str | None

        if self._readme is None:
            if 'readme' in self.config:
                readme = self.config['readme']
                if 'readme' in self.dynamic:
                    message = (
                        'Metadata field `readme` cannot be both statically defined and '
                        'listed in field `project.dynamic`'
                    )
                    raise ValueError(message)
            else:
                readme = None

            if readme is None:
                # No readme configured: empty contents, markdown by default.
                self._readme = ''
                self._readme_content_type = 'text/markdown'
                self._readme_path = ''
            elif isinstance(readme, str):
                # Path form: infer the content type from the file extension.
                normalized_path = readme.lower()
                if normalized_path.endswith('.md'):
                    content_type = 'text/markdown'
                elif normalized_path.endswith('.rst'):
                    content_type = 'text/x-rst'
                elif normalized_path.endswith('.txt'):
                    content_type = 'text/plain'
                else:
                    message = f'Unable to determine the content-type based on the extension of readme file: {readme}'
                    raise TypeError(message)

                readme_path = os.path.normpath(os.path.join(self.root, readme))
                if not os.path.isfile(readme_path):
                    message = f'Readme file does not exist: {readme}'
                    raise OSError(message)

                with open(readme_path, encoding='utf-8') as f:
                    self._readme = f.read()

                self._readme_content_type = content_type
                self._readme_path = readme
            elif isinstance(readme, dict):
                # Table form: explicit `content-type` with `file` or `text`.
                content_type = readme.get('content-type')
                if content_type is None:
                    message = 'Field `content-type` is required in the `project.readme` table'
                    raise ValueError(message)

                if not isinstance(content_type, str):
                    message = 'Field `content-type` in the `project.readme` table must be a string'
                    raise TypeError(message)

                if content_type not in {'text/markdown', 'text/x-rst', 'text/plain'}:
                    message = (
                        'Field `content-type` in the `project.readme` table must be one of the following: '
                        'text/markdown, text/x-rst, text/plain'
                    )
                    raise ValueError(message)

                if 'file' in readme and 'text' in readme:
                    message = 'Cannot specify both `file` and `text` in the `project.readme` table'
                    raise ValueError(message)

                if 'file' in readme:
                    relative_path = readme['file']
                    if not isinstance(relative_path, str):
                        message = 'Field `file` in the `project.readme` table must be a string'
                        raise TypeError(message)

                    path = os.path.normpath(os.path.join(self.root, relative_path))
                    if not os.path.isfile(path):
                        message = f'Readme file does not exist: {relative_path}'
                        raise OSError(message)

                    # The table form may override the charset; defaults to UTF-8.
                    with open(path, encoding=readme.get('charset', 'utf-8')) as f:
                        contents = f.read()

                    readme_path = relative_path
                elif 'text' in readme:
                    contents = readme['text']
                    if not isinstance(contents, str):
                        message = 'Field `text` in the `project.readme` table must be a string'
                        raise TypeError(message)

                    readme_path = ''
                else:
                    message = 'Must specify either `file` or `text` in the `project.readme` table'
                    raise ValueError(message)

                self._readme = contents
                self._readme_content_type = content_type
                self._readme_path = readme_path
            else:
                message = 'Field `project.readme` must be a string or a table'
                raise TypeError(message)

        return self._readme

    @property
    def readme_content_type(self) -> str:
        """
        The content type of the readme, resolved as a side effect of `readme`.

        https://peps.python.org/pep-0621/#readme
        """
        if self._readme_content_type is None:
            _ = self.readme

        return cast(str, self._readme_content_type)

    @property
    def readme_path(self) -> str:
        """
        The readme's relative path, or '' when the contents were inline/absent.

        https://peps.python.org/pep-0621/#readme
        """
        if self._readme_path is None:
            _ = self.readme

        return cast(str, self._readme_path)
in self.dynamic: + message = ( + 'Metadata field `requires-python` cannot be both statically defined and ' + 'listed in field `project.dynamic`' + ) + raise ValueError(message) + else: + requires_python = '' + + if not isinstance(requires_python, str): + message = 'Field `project.requires-python` must be a string' + raise TypeError(message) + + try: + self._python_constraint = SpecifierSet(requires_python) + except InvalidSpecifier as e: + message = f'Field `project.requires-python` is invalid: {e}' + raise ValueError(message) from None + + self._requires_python = str(self._python_constraint) + + return self._requires_python + + @property + def python_constraint(self) -> SpecifierSet: + from packaging.specifiers import SpecifierSet + + if self._python_constraint is None: + _ = self.requires_python + + return cast(SpecifierSet, self._python_constraint) + + @property + def license(self) -> str: # noqa: A003 + """ + https://peps.python.org/pep-0621/#license + """ + if self._license is None: + if 'license' in self.config: + data = self.config['license'] + if 'license' in self.dynamic: + message = ( + 'Metadata field `license` cannot be both statically defined and ' + 'listed in field `project.dynamic`' + ) + raise ValueError(message) + else: + data = None + + if data is None: + self._license = '' + self._license_expression = '' + elif isinstance(data, str): + from hatchling.licenses.parse import normalize_license_expression + + try: + self._license_expression = normalize_license_expression(data) + except ValueError as e: + message = f'Error parsing field `project.license` - {e}' + raise ValueError(message) from None + + self._license = '' + elif isinstance(data, dict): + if 'file' in data and 'text' in data: + message = 'Cannot specify both `file` and `text` in the `project.license` table' + raise ValueError(message) + + if 'file' in data: + relative_path = data['file'] + if not isinstance(relative_path, str): + message = 'Field `file` in the `project.license` table 
must be a string' + raise TypeError(message) + + path = os.path.normpath(os.path.join(self.root, relative_path)) + if not os.path.isfile(path): + message = f'License file does not exist: {relative_path}' + raise OSError(message) + + with open(path, encoding='utf-8') as f: + contents = f.read() + elif 'text' in data: + contents = data['text'] + if not isinstance(contents, str): + message = 'Field `text` in the `project.license` table must be a string' + raise TypeError(message) + else: + message = 'Must specify either `file` or `text` in the `project.license` table' + raise ValueError(message) + + self._license = contents + self._license_expression = '' + else: + message = 'Field `project.license` must be a string or a table' + raise TypeError(message) + + return self._license + + @property + def license_expression(self) -> str: + """ + https://peps.python.org/pep-0639/ + """ + if self._license_expression is None: + _ = self.license + + return cast(str, self._license_expression) + + @property + def license_files(self) -> list[str]: + """ + https://peps.python.org/pep-0639/ + """ + if self._license_files is None: + if 'license-files' not in self.config: + data = {'globs': ['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']} + else: + if 'license-files' in self.dynamic: + message = ( + 'Metadata field `license-files` cannot be both statically defined and ' + 'listed in field `project.dynamic`' + ) + raise ValueError(message) + + data = self.config['license-files'] + if not isinstance(data, dict): + message = 'Field `project.license-files` must be a table' + raise TypeError(message) + + if 'paths' in data and 'globs' in data: + message = 'Cannot specify both `paths` and `globs` in the `project.license-files` table' + raise ValueError(message) + + license_files = [] + if 'paths' in data: + paths = data['paths'] + if not isinstance(paths, list): + message = 'Field `paths` in the `project.license-files` table must be an array' + raise TypeError(message) + + for i, 
relative_path in enumerate(paths, 1): + if not isinstance(relative_path, str): + message = f'Entry #{i} in field `paths` in the `project.license-files` table must be a string' + raise TypeError(message) + + path = os.path.normpath(os.path.join(self.root, relative_path)) + if not os.path.isfile(path): + message = f'License file does not exist: {relative_path}' + raise OSError(message) + + license_files.append(os.path.relpath(path, self.root).replace('\\', '/')) + elif 'globs' in data: + from glob import glob + + globs = data['globs'] + if not isinstance(globs, list): + message = 'Field `globs` in the `project.license-files` table must be an array' + raise TypeError(message) + + for i, pattern in enumerate(globs, 1): + if not isinstance(pattern, str): + message = f'Entry #{i} in field `globs` in the `project.license-files` table must be a string' + raise TypeError(message) + + full_pattern = os.path.normpath(os.path.join(self.root, pattern)) + license_files.extend( + os.path.relpath(path, self.root).replace('\\', '/') + for path in glob(full_pattern) + if os.path.isfile(path) + ) + else: + message = 'Must specify either `paths` or `globs` in the `project.license-files` table if defined' + raise ValueError(message) + + self._license_files = sorted(license_files) + + return self._license_files + + @property + def authors(self) -> list[str]: + """ + https://peps.python.org/pep-0621/#authors-maintainers + """ + authors: list[str] + authors_data: dict[str, list[str]] + + if self._authors is None: + if 'authors' in self.config: + authors = self.config['authors'] + if 'authors' in self.dynamic: + message = ( + 'Metadata field `authors` cannot be both statically defined and ' + 'listed in field `project.dynamic`' + ) + raise ValueError(message) + else: + authors = [] + + if not isinstance(authors, list): + message = 'Field `project.authors` must be an array' + raise TypeError(message) + + from email.headerregistry import Address + + authors = deepcopy(authors) + authors_data 
= {'name': [], 'email': []} + + for i, data in enumerate(authors, 1): + if not isinstance(data, dict): + message = f'Author #{i} of field `project.authors` must be an inline table' + raise TypeError(message) + + name = data.get('name', '') + if not isinstance(name, str): + message = f'Name of author #{i} of field `project.authors` must be a string' + raise TypeError(message) + + email = data.get('email', '') + if not isinstance(email, str): + message = f'Email of author #{i} of field `project.authors` must be a string' + raise TypeError(message) + + if name and email: + authors_data['email'].append(str(Address(display_name=name, addr_spec=email))) + elif email: + authors_data['email'].append(str(Address(addr_spec=email))) + elif name: + authors_data['name'].append(name) + else: + message = f'Author #{i} of field `project.authors` must specify either `name` or `email`' + raise ValueError(message) + + self._authors = authors + self._authors_data = authors_data + + return self._authors + + @property + def authors_data(self) -> dict[str, list[str]]: + """ + https://peps.python.org/pep-0621/#authors-maintainers + """ + if self._authors_data is None: + _ = self.authors + + return cast(dict, self._authors_data) + + @property + def maintainers(self) -> list[str]: + """ + https://peps.python.org/pep-0621/#authors-maintainers + """ + maintainers: list[str] + + if self._maintainers is None: + if 'maintainers' in self.config: + maintainers = self.config['maintainers'] + if 'maintainers' in self.dynamic: + message = ( + 'Metadata field `maintainers` cannot be both statically defined and ' + 'listed in field `project.dynamic`' + ) + raise ValueError(message) + else: + maintainers = [] + + if not isinstance(maintainers, list): + message = 'Field `project.maintainers` must be an array' + raise TypeError(message) + + from email.headerregistry import Address + + maintainers = deepcopy(maintainers) + maintainers_data: dict[str, list[str]] = {'name': [], 'email': []} + + for i, data 
in enumerate(maintainers, 1): + if not isinstance(data, dict): + message = f'Maintainer #{i} of field `project.maintainers` must be an inline table' + raise TypeError(message) + + name = data.get('name', '') + if not isinstance(name, str): + message = f'Name of maintainer #{i} of field `project.maintainers` must be a string' + raise TypeError(message) + + email = data.get('email', '') + if not isinstance(email, str): + message = f'Email of maintainer #{i} of field `project.maintainers` must be a string' + raise TypeError(message) + + if name and email: + maintainers_data['email'].append(str(Address(display_name=name, addr_spec=email))) + elif email: + maintainers_data['email'].append(str(Address(addr_spec=email))) + elif name: + maintainers_data['name'].append(name) + else: + message = f'Maintainer #{i} of field `project.maintainers` must specify either `name` or `email`' + raise ValueError(message) + + self._maintainers = maintainers + self._maintainers_data = maintainers_data + + return self._maintainers + + @property + def maintainers_data(self) -> dict[str, list[str]]: + """ + https://peps.python.org/pep-0621/#authors-maintainers + """ + if self._maintainers_data is None: + _ = self.maintainers + + return cast(dict, self._maintainers_data) + + @property + def keywords(self) -> list[str]: + """ + https://peps.python.org/pep-0621/#keywords + """ + if self._keywords is None: + if 'keywords' in self.config: + keywords = self.config['keywords'] + if 'keywords' in self.dynamic: + message = ( + 'Metadata field `keywords` cannot be both statically defined and ' + 'listed in field `project.dynamic`' + ) + raise ValueError(message) + else: + keywords = [] + + if not isinstance(keywords, list): + message = 'Field `project.keywords` must be an array' + raise TypeError(message) + + unique_keywords = set() + + for i, keyword in enumerate(keywords, 1): + if not isinstance(keyword, str): + message = f'Keyword #{i} of field `project.keywords` must be a string' + raise 
TypeError(message) + + unique_keywords.add(keyword) + + self._keywords = sorted(unique_keywords) + + return self._keywords + + @property + def classifiers(self) -> list[str]: + """ + https://peps.python.org/pep-0621/#classifiers + """ + if self._classifiers is None: + import bisect + + import trove_classifiers + + if 'classifiers' in self.config: + classifiers = self.config['classifiers'] + if 'classifiers' in self.dynamic: + message = ( + 'Metadata field `classifiers` cannot be both statically defined and ' + 'listed in field `project.dynamic`' + ) + raise ValueError(message) + else: + classifiers = [] + + if not isinstance(classifiers, list): + message = 'Field `project.classifiers` must be an array' + raise TypeError(message) + + known_classifiers = trove_classifiers.classifiers | self._extra_classifiers + unique_classifiers = set() + + for i, classifier in enumerate(classifiers, 1): + if not isinstance(classifier, str): + message = f'Classifier #{i} of field `project.classifiers` must be a string' + raise TypeError(message) + + if not self.__classifier_is_private(classifier) and classifier not in known_classifiers: + message = f'Unknown classifier in field `project.classifiers`: {classifier}' + raise ValueError(message) + + unique_classifiers.add(classifier) + + sorted_classifiers = list(trove_classifiers.sorted_classifiers) + for classifier in sorted(self._extra_classifiers - trove_classifiers.classifiers): + bisect.insort(sorted_classifiers, classifier) + + self._classifiers = sorted( + unique_classifiers, key=lambda c: -1 if self.__classifier_is_private(c) else sorted_classifiers.index(c) + ) + + return self._classifiers + + @property + def urls(self) -> dict[str, str]: + """ + https://peps.python.org/pep-0621/#urls + """ + if self._urls is None: + if 'urls' in self.config: + urls = self.config['urls'] + if 'urls' in self.dynamic: + message = ( + 'Metadata field `urls` cannot be both statically defined and listed in field `project.dynamic`' + ) + raise 
ValueError(message) + else: + urls = {} + + if not isinstance(urls, dict): + message = 'Field `project.urls` must be a table' + raise TypeError(message) + + sorted_urls = {} + + for label, url in urls.items(): + if not isinstance(url, str): + message = f'URL `{label}` of field `project.urls` must be a string' + raise TypeError(message) + + sorted_urls[label] = url + + self._urls = sorted_urls + + return self._urls + + @property + def scripts(self) -> dict[str, str]: + """ + https://peps.python.org/pep-0621/#entry-points + """ + if self._scripts is None: + if 'scripts' in self.config: + scripts = self.config['scripts'] + if 'scripts' in self.dynamic: + message = ( + 'Metadata field `scripts` cannot be both statically defined and ' + 'listed in field `project.dynamic`' + ) + raise ValueError(message) + else: + scripts = {} + + if not isinstance(scripts, dict): + message = 'Field `project.scripts` must be a table' + raise TypeError(message) + + sorted_scripts = {} + + for name, object_ref in sorted(scripts.items()): + if not isinstance(object_ref, str): + message = f'Object reference `{name}` of field `project.scripts` must be a string' + raise TypeError(message) + + sorted_scripts[name] = object_ref + + self._scripts = sorted_scripts + + return self._scripts + + @property + def gui_scripts(self) -> dict[str, str]: + """ + https://peps.python.org/pep-0621/#entry-points + """ + if self._gui_scripts is None: + if 'gui-scripts' in self.config: + gui_scripts = self.config['gui-scripts'] + if 'gui-scripts' in self.dynamic: + message = ( + 'Metadata field `gui-scripts` cannot be both statically defined and ' + 'listed in field `project.dynamic`' + ) + raise ValueError(message) + else: + gui_scripts = {} + + if not isinstance(gui_scripts, dict): + message = 'Field `project.gui-scripts` must be a table' + raise TypeError(message) + + sorted_gui_scripts = {} + + for name, object_ref in sorted(gui_scripts.items()): + if not isinstance(object_ref, str): + message = f'Object 
reference `{name}` of field `project.gui-scripts` must be a string' + raise TypeError(message) + + sorted_gui_scripts[name] = object_ref + + self._gui_scripts = sorted_gui_scripts + + return self._gui_scripts + + @property + def entry_points(self) -> dict[str, dict[str, str]]: + """ + https://peps.python.org/pep-0621/#entry-points + """ + if self._entry_points is None: + if 'entry-points' in self.config: + defined_entry_point_groups = self.config['entry-points'] + if 'entry-points' in self.dynamic: + message = ( + 'Metadata field `entry-points` cannot be both statically defined and ' + 'listed in field `project.dynamic`' + ) + raise ValueError(message) + else: + defined_entry_point_groups = {} + + if not isinstance(defined_entry_point_groups, dict): + message = 'Field `project.entry-points` must be a table' + raise TypeError(message) + + for forbidden_field, expected_field in (('console_scripts', 'scripts'), ('gui-scripts', 'gui-scripts')): + if forbidden_field in defined_entry_point_groups: + message = ( + f'Field `{forbidden_field}` must be defined as `project.{expected_field}` ' + f'instead of in the `project.entry-points` table' + ) + raise ValueError(message) + + entry_point_groups = {} + + for group, entry_point_data in sorted(defined_entry_point_groups.items()): + if not isinstance(entry_point_data, dict): + message = f'Field `project.entry-points.{group}` must be a table' + raise TypeError(message) + + entry_points = {} + + for name, object_ref in sorted(entry_point_data.items()): + if not isinstance(object_ref, str): + message = f'Object reference `{name}` of field `project.entry-points.{group}` must be a string' + raise TypeError(message) + + entry_points[name] = object_ref + + if entry_points: + entry_point_groups[group] = entry_points + + self._entry_points = entry_point_groups + + return self._entry_points + + @property + def dependencies_complex(self) -> dict[str, Requirement]: + """ + 
https://peps.python.org/pep-0621/#dependencies-optional-dependencies + """ + if self._dependencies_complex is None: + from packaging.requirements import InvalidRequirement, Requirement + + if 'dependencies' in self.config: + dependencies = self.config['dependencies'] + if 'dependencies' in self.dynamic: + message = ( + 'Metadata field `dependencies` cannot be both statically defined and ' + 'listed in field `project.dynamic`' + ) + raise ValueError(message) + else: + dependencies = [] + + if not isinstance(dependencies, list): + message = 'Field `project.dependencies` must be an array' + raise TypeError(message) + + dependencies_complex = {} + + for i, entry in enumerate(dependencies, 1): + if not isinstance(entry, str): + message = f'Dependency #{i} of field `project.dependencies` must be a string' + raise TypeError(message) + + try: + requirement = Requirement(self.context.format(entry)) + except InvalidRequirement as e: + message = f'Dependency #{i} of field `project.dependencies` is invalid: {e}' + raise ValueError(message) from None + else: + if requirement.url and not self.hatch_metadata.allow_direct_references: + message = ( + f'Dependency #{i} of field `project.dependencies` cannot be a direct reference unless ' + f'field `tool.hatch.metadata.allow-direct-references` is set to `true`' + ) + raise ValueError(message) + + dependencies_complex[get_normalized_dependency(requirement)] = requirement + + self._dependencies_complex = dict(sorted(dependencies_complex.items())) + + return self._dependencies_complex + + @property + def dependencies(self) -> list[str]: + """ + https://peps.python.org/pep-0621/#dependencies-optional-dependencies + """ + if self._dependencies is None: + self._dependencies = list(self.dependencies_complex) + + return self._dependencies + + @property + def optional_dependencies_complex(self) -> dict[str, dict[str, Requirement]]: + """ + https://peps.python.org/pep-0621/#dependencies-optional-dependencies + """ + if 
self._optional_dependencies_complex is None: + from packaging.requirements import InvalidRequirement, Requirement + + if 'optional-dependencies' in self.config: + optional_dependencies = self.config['optional-dependencies'] + if 'optional-dependencies' in self.dynamic: + message = ( + 'Metadata field `optional-dependencies` cannot be both statically defined and ' + 'listed in field `project.dynamic`' + ) + raise ValueError(message) + else: + optional_dependencies = {} + + if not isinstance(optional_dependencies, dict): + message = 'Field `project.optional-dependencies` must be a table' + raise TypeError(message) + + normalized_options: dict[str, str] = {} + optional_dependency_entries = {} + + for option, dependencies in optional_dependencies.items(): + if not is_valid_project_name(option): + message = ( + f'Optional dependency group `{option}` of field `project.optional-dependencies` must only ' + f'contain ASCII letters/digits, underscores, hyphens, and periods, and must begin and end with ' + f'ASCII letters/digits.' 
+ ) + raise ValueError(message) + + if not isinstance(dependencies, list): + message = ( + f'Dependencies for option `{option}` of field `project.optional-dependencies` must be an array' + ) + raise TypeError(message) + + entries = {} + + for i, entry in enumerate(dependencies, 1): + if not isinstance(entry, str): + message = ( + f'Dependency #{i} of option `{option}` of field `project.optional-dependencies` ' + f'must be a string' + ) + raise TypeError(message) + + try: + requirement = Requirement(self.context.format(entry)) + except InvalidRequirement as e: + message = ( + f'Dependency #{i} of option `{option}` of field `project.optional-dependencies` ' + f'is invalid: {e}' + ) + raise ValueError(message) from None + else: + if requirement.url and not self.hatch_metadata.allow_direct_references: + message = ( + f'Dependency #{i} of option `{option}` of field `project.optional-dependencies` ' + f'cannot be a direct reference unless field ' + f'`tool.hatch.metadata.allow-direct-references` is set to `true`' + ) + raise ValueError(message) + + entries[get_normalized_dependency(requirement)] = requirement + + normalized_option = ( + option if self.hatch_metadata.allow_ambiguous_features else normalize_project_name(option) + ) + if normalized_option in normalized_options: + message = ( + f'Optional dependency groups `{normalized_options[normalized_option]}` and `{option}` of ' + f'field `project.optional-dependencies` both evaluate to `{normalized_option}`.' 
+ ) + raise ValueError(message) + + normalized_options[normalized_option] = option + optional_dependency_entries[normalized_option] = dict(sorted(entries.items())) + + self._optional_dependencies_complex = dict(sorted(optional_dependency_entries.items())) + + return self._optional_dependencies_complex + + @property + def optional_dependencies(self) -> dict[str, list[str]]: + """ + https://peps.python.org/pep-0621/#dependencies-optional-dependencies + """ + if self._optional_dependencies is None: + self._optional_dependencies = { + option: list(entries) for option, entries in self.optional_dependencies_complex.items() + } + + return self._optional_dependencies + + @property + def dynamic(self) -> list[str]: + """ + https://peps.python.org/pep-0621/#dynamic + """ + if self._dynamic is None: + self._dynamic = self.config.get('dynamic', []) + + if not isinstance(self._dynamic, list): + message = 'Field `project.dynamic` must be an array' + raise TypeError(message) + + if not all(isinstance(entry, str) for entry in self._dynamic): + message = 'Field `project.dynamic` must only contain strings' + raise TypeError(message) + + return self._dynamic + + def add_known_classifiers(self, classifiers: list[str]) -> None: + self._extra_classifiers.update(classifiers) + + def validate_fields(self) -> None: + # Trigger validation for everything + for attribute in dir(self): + getattr(self, attribute) + + @staticmethod + def __classifier_is_private(classifier: str) -> bool: + return classifier.lower().startswith('private ::') + + +class HatchMetadata(Generic[PluginManagerBound]): + def __init__(self, root: str, config: dict[str, dict[str, Any]], plugin_manager: PluginManagerBound) -> None: + self.root = root + self.config = config + self.plugin_manager = plugin_manager + + self._metadata: HatchMetadataSettings | None = None + self._build_config: dict[str, Any] | None = None + self._build_targets: dict[str, Any] | None = None + self._version: HatchVersionConfig | None = None + + 
@property + def metadata(self) -> HatchMetadataSettings: + if self._metadata is None: + metadata_config = self.config.get('metadata', {}) + if not isinstance(metadata_config, dict): + message = 'Field `tool.hatch.metadata` must be a table' + raise TypeError(message) + + self._metadata = HatchMetadataSettings(self.root, metadata_config, self.plugin_manager) + + return self._metadata + + @property + def build_config(self) -> dict[str, Any]: + if self._build_config is None: + build_config = self.config.get('build', {}) + if not isinstance(build_config, dict): + message = 'Field `tool.hatch.build` must be a table' + raise TypeError(message) + + self._build_config = build_config + + return self._build_config + + @property + def build_targets(self) -> dict[str, Any]: + if self._build_targets is None: + build_targets: dict = self.build_config.get('targets', {}) + if not isinstance(build_targets, dict): + message = 'Field `tool.hatch.build.targets` must be a table' + raise TypeError(message) + + self._build_targets = build_targets + + return self._build_targets + + @property + def version(self) -> HatchVersionConfig: + if self._version is None: + if 'version' not in self.config: + message = 'Missing `tool.hatch.version` configuration' + raise ValueError(message) + + options = self.config['version'] + if not isinstance(options, dict): + message = 'Field `tool.hatch.version` must be a table' + raise TypeError(message) + + self._version = HatchVersionConfig(self.root, deepcopy(options), self.plugin_manager) + + return self._version + + +class HatchVersionConfig(Generic[PluginManagerBound]): + def __init__(self, root: str, config: dict[str, Any], plugin_manager: PluginManagerBound) -> None: + self.root = root + self.config = config + self.plugin_manager = plugin_manager + + self._cached: str | None = None + self._source_name: str | None = None + self._scheme_name: str | None = None + self._source: VersionSourceInterface | None = None + self._scheme: VersionSchemeInterface | 
None = None + + @property + def cached(self) -> str: + if self._cached is None: + try: + self._cached = self.source.get_version_data()['version'] + except Exception as e: # noqa: BLE001 + message = f'Error getting the version from source `{self.source.PLUGIN_NAME}`: {e}' + raise type(e)(message) from None + + return self._cached + + @property + def source_name(self) -> str: + if self._source_name is None: + source: str = self.config.get('source', 'regex') + if not source: + message = 'The `source` option under the `tool.hatch.version` table must not be empty if defined' + raise ValueError(message) + + if not isinstance(source, str): + message = 'Field `tool.hatch.version.source` must be a string' + raise TypeError(message) + + self._source_name = source + + return self._source_name + + @property + def scheme_name(self) -> str: + if self._scheme_name is None: + scheme: str = self.config.get('scheme', 'standard') + if not scheme: + message = 'The `scheme` option under the `tool.hatch.version` table must not be empty if defined' + raise ValueError(message) + + if not isinstance(scheme, str): + message = 'Field `tool.hatch.version.scheme` must be a string' + raise TypeError(message) + + self._scheme_name = scheme + + return self._scheme_name + + @property + def source(self) -> VersionSourceInterface: + if self._source is None: + from copy import deepcopy + + source_name = self.source_name + version_source = self.plugin_manager.version_source.get(source_name) + if version_source is None: + from hatchling.plugin.exceptions import UnknownPluginError + + message = f'Unknown version source: {source_name}' + raise UnknownPluginError(message) + + self._source = version_source(self.root, deepcopy(self.config)) + + return self._source + + @property + def scheme(self) -> VersionSchemeInterface: + if self._scheme is None: + from copy import deepcopy + + scheme_name = self.scheme_name + version_scheme = self.plugin_manager.version_scheme.get(scheme_name) + if version_scheme is 
None: + from hatchling.plugin.exceptions import UnknownPluginError + + message = f'Unknown version scheme: {scheme_name}' + raise UnknownPluginError(message) + + self._scheme = version_scheme(self.root, deepcopy(self.config)) + + return self._scheme + + +class HatchMetadataSettings(Generic[PluginManagerBound]): + def __init__(self, root: str, config: dict[str, Any], plugin_manager: PluginManagerBound) -> None: + self.root = root + self.config = config + self.plugin_manager = plugin_manager + + self._allow_direct_references: bool | None = None + self._allow_ambiguous_features: bool | None = None + self._hook_config: dict[str, Any] | None = None + self._hooks: dict[str, MetadataHookInterface] | None = None + + @property + def allow_direct_references(self) -> bool: + if self._allow_direct_references is None: + allow_direct_references: bool = self.config.get('allow-direct-references', False) + if not isinstance(allow_direct_references, bool): + message = 'Field `tool.hatch.metadata.allow-direct-references` must be a boolean' + raise TypeError(message) + + self._allow_direct_references = allow_direct_references + + return self._allow_direct_references + + @property + def allow_ambiguous_features(self) -> bool: + # TODO: remove in the first minor release after Jan 1, 2024 + if self._allow_ambiguous_features is None: + allow_ambiguous_features: bool = self.config.get('allow-ambiguous-features', False) + if not isinstance(allow_ambiguous_features, bool): + message = 'Field `tool.hatch.metadata.allow-ambiguous-features` must be a boolean' + raise TypeError(message) + + self._allow_ambiguous_features = allow_ambiguous_features + + return self._allow_ambiguous_features + + @property + def hook_config(self) -> dict[str, Any]: + if self._hook_config is None: + hook_config: dict[str, Any] = self.config.get('hooks', {}) + if not isinstance(hook_config, dict): + message = 'Field `tool.hatch.metadata.hooks` must be a table' + raise TypeError(message) + + self._hook_config = 
hook_config + + return self._hook_config + + @property + def hooks(self) -> dict[str, MetadataHookInterface]: + if self._hooks is None: + hook_config = self.hook_config + + configured_hooks = {} + for hook_name, config in hook_config.items(): + metadata_hook = self.plugin_manager.metadata_hook.get(hook_name) + if metadata_hook is None: + from hatchling.plugin.exceptions import UnknownPluginError + + message = f'Unknown metadata hook: {hook_name}' + raise UnknownPluginError(message) + + configured_hooks[hook_name] = metadata_hook(self.root, config) + + self._hooks = configured_hooks + + return self._hooks diff --git a/src/hatchling/metadata/custom.py b/src/hatchling/metadata/custom.py new file mode 100644 index 0000000..148d14b --- /dev/null +++ b/src/hatchling/metadata/custom.py @@ -0,0 +1,41 @@ +from __future__ import annotations + +import os +from typing import Any + +from hatchling.metadata.plugin.interface import MetadataHookInterface +from hatchling.plugin.utils import load_plugin_from_script +from hatchling.utils.constants import DEFAULT_BUILD_SCRIPT + + +class CustomMetadataHook: + PLUGIN_NAME = 'custom' + + def __new__( # type: ignore + cls, + root: str, + config: dict[str, Any], + *args: Any, + **kwargs: Any, + ) -> MetadataHookInterface: + build_script = config.get('path', DEFAULT_BUILD_SCRIPT) + if not isinstance(build_script, str): + message = f'Option `path` for metadata hook `{cls.PLUGIN_NAME}` must be a string' + raise TypeError(message) + + if not build_script: + message = f'Option `path` for metadata hook `{cls.PLUGIN_NAME}` must not be empty if defined' + raise ValueError(message) + + path = os.path.normpath(os.path.join(root, build_script)) + if not os.path.isfile(path): + message = f'Build script does not exist: {build_script}' + raise OSError(message) + + hook_class = load_plugin_from_script(path, build_script, MetadataHookInterface, 'metadata_hook') # type: ignore + hook = hook_class(root, config, *args, **kwargs) + + # Always keep the name to 
avoid confusion + hook.PLUGIN_NAME = cls.PLUGIN_NAME + + return hook diff --git a/src/hatchling/metadata/plugin/__init__.py b/src/hatchling/metadata/plugin/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/hatchling/metadata/plugin/hooks.py b/src/hatchling/metadata/plugin/hooks.py new file mode 100644 index 0000000..7e8e367 --- /dev/null +++ b/src/hatchling/metadata/plugin/hooks.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from hatchling.metadata.custom import CustomMetadataHook +from hatchling.plugin import hookimpl + +if TYPE_CHECKING: + from hatchling.metadata.plugin.interface import MetadataHookInterface + + +@hookimpl +def hatch_register_metadata_hook() -> type[MetadataHookInterface]: + return CustomMetadataHook # type: ignore diff --git a/src/hatchling/metadata/plugin/interface.py b/src/hatchling/metadata/plugin/interface.py new file mode 100644 index 0000000..b6c8fdb --- /dev/null +++ b/src/hatchling/metadata/plugin/interface.py @@ -0,0 +1,66 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod + + +class MetadataHookInterface(ABC): # no cov + """ + Example usage: + + ```python tab="plugin.py" + from hatchling.metadata.plugin.interface import MetadataHookInterface + + + class SpecialMetadataHook(MetadataHookInterface): + PLUGIN_NAME = 'special' + ... + ``` + + ```python tab="hooks.py" + from hatchling.plugin import hookimpl + + from .plugin import SpecialMetadataHook + + + @hookimpl + def hatch_register_metadata_hook(): + return SpecialMetadataHook + ``` + """ + + PLUGIN_NAME = '' + """The name used for selection.""" + + def __init__(self, root: str, config: dict) -> None: + self.__root = root + self.__config = config + + @property + def root(self) -> str: + """ + The root of the project tree. + """ + return self.__root + + @property + def config(self) -> dict: + """ + The hook configuration. + + ```toml config-example + [tool.hatch.metadata.hooks.] 
+ ``` + """ + return self.__config + + @abstractmethod + def update(self, metadata: dict) -> None: + """ + This updates the metadata mapping of the `project` table in-place. + """ + + def get_known_classifiers(self) -> list[str]: # noqa: PLR6301 + """ + This returns extra classifiers that should be considered valid in addition to the ones known to PyPI. + """ + return [] diff --git a/src/hatchling/metadata/spec.py b/src/hatchling/metadata/spec.py new file mode 100644 index 0000000..8e9ce69 --- /dev/null +++ b/src/hatchling/metadata/spec.py @@ -0,0 +1,314 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Callable + +if TYPE_CHECKING: + from hatchling.metadata.core import ProjectMetadata + +DEFAULT_METADATA_VERSION = '2.1' + + +def get_core_metadata_constructors() -> dict[str, Callable]: + """ + https://packaging.python.org/specifications/core-metadata/ + """ + return { + '1.2': construct_metadata_file_1_2, + '2.1': construct_metadata_file_2_1, + '2.2': construct_metadata_file_2_2, + '2.3': construct_metadata_file_2_3, + } + + +def construct_metadata_file_1_2(metadata: ProjectMetadata, extra_dependencies: tuple[str] | None = None) -> str: + """ + https://peps.python.org/pep-0345/ + """ + metadata_file = 'Metadata-Version: 1.2\n' + metadata_file += f'Name: {metadata.core.raw_name}\n' + metadata_file += f'Version: {metadata.version}\n' + + if metadata.core.description: + metadata_file += f'Summary: {metadata.core.description}\n' + + if metadata.core.urls: + for label, url in metadata.core.urls.items(): + metadata_file += f'Project-URL: {label}, {url}\n' + + authors_data = metadata.core.authors_data + if authors_data['name']: + metadata_file += f"Author: {', '.join(authors_data['name'])}\n" + if authors_data['email']: + metadata_file += f"Author-email: {', '.join(authors_data['email'])}\n" + + maintainers_data = metadata.core.maintainers_data + if maintainers_data['name']: + metadata_file += f"Maintainer: {', 
'.join(maintainers_data['name'])}\n" + if maintainers_data['email']: + metadata_file += f"Maintainer-email: {', '.join(maintainers_data['email'])}\n" + + if metadata.core.license: + license_start = 'License: ' + indent = ' ' * (len(license_start) - 1) + metadata_file += license_start + + for i, line in enumerate(metadata.core.license.splitlines()): + if i == 0: + metadata_file += f'{line}\n' + else: + metadata_file += f'{indent}{line}\n' + + if metadata.core.keywords: + metadata_file += f"Keywords: {','.join(metadata.core.keywords)}\n" + + if metadata.core.classifiers: + for classifier in metadata.core.classifiers: + metadata_file += f'Classifier: {classifier}\n' + + if metadata.core.requires_python: + metadata_file += f'Requires-Python: {metadata.core.requires_python}\n' + + if metadata.core.dependencies: + for dependency in metadata.core.dependencies: + metadata_file += f'Requires-Dist: {dependency}\n' + + if extra_dependencies: + for dependency in extra_dependencies: + metadata_file += f'Requires-Dist: {dependency}\n' + + return metadata_file + + +def construct_metadata_file_2_1(metadata: ProjectMetadata, extra_dependencies: tuple[str] | None = None) -> str: + """ + https://peps.python.org/pep-0566/ + """ + metadata_file = 'Metadata-Version: 2.1\n' + metadata_file += f'Name: {metadata.core.raw_name}\n' + metadata_file += f'Version: {metadata.version}\n' + + if metadata.core.description: + metadata_file += f'Summary: {metadata.core.description}\n' + + if metadata.core.urls: + for label, url in metadata.core.urls.items(): + metadata_file += f'Project-URL: {label}, {url}\n' + + authors_data = metadata.core.authors_data + if authors_data['name']: + metadata_file += f"Author: {', '.join(authors_data['name'])}\n" + if authors_data['email']: + metadata_file += f"Author-email: {', '.join(authors_data['email'])}\n" + + maintainers_data = metadata.core.maintainers_data + if maintainers_data['name']: + metadata_file += f"Maintainer: {', '.join(maintainers_data['name'])}\n" 
+ if maintainers_data['email']: + metadata_file += f"Maintainer-email: {', '.join(maintainers_data['email'])}\n" + + if metadata.core.license: + license_start = 'License: ' + indent = ' ' * (len(license_start) - 1) + metadata_file += license_start + + for i, line in enumerate(metadata.core.license.splitlines()): + if i == 0: + metadata_file += f'{line}\n' + else: + metadata_file += f'{indent}{line}\n' + + if metadata.core.license_expression: + metadata_file += f'License-Expression: {metadata.core.license_expression}\n' + + if metadata.core.license_files: + for license_file in metadata.core.license_files: + metadata_file += f'License-File: {license_file}\n' + + if metadata.core.keywords: + metadata_file += f"Keywords: {','.join(metadata.core.keywords)}\n" + + if metadata.core.classifiers: + for classifier in metadata.core.classifiers: + metadata_file += f'Classifier: {classifier}\n' + + if metadata.core.requires_python: + metadata_file += f'Requires-Python: {metadata.core.requires_python}\n' + + if metadata.core.dependencies: + for dependency in metadata.core.dependencies: + metadata_file += f'Requires-Dist: {dependency}\n' + + if extra_dependencies: + for dependency in extra_dependencies: + metadata_file += f'Requires-Dist: {dependency}\n' + + if metadata.core.optional_dependencies: + for option, dependencies in metadata.core.optional_dependencies.items(): + metadata_file += f'Provides-Extra: {option}\n' + for dependency in dependencies: + if ';' in dependency: + dep_name, dep_env_marker = dependency.split(';', maxsplit=1) + metadata_file += f'Requires-Dist: {dep_name}; ({dep_env_marker.strip()}) and extra == {option!r}\n' + elif '@ ' in dependency: + metadata_file += f'Requires-Dist: {dependency} ; extra == {option!r}\n' + else: + metadata_file += f'Requires-Dist: {dependency}; extra == {option!r}\n' + + if metadata.core.readme: + metadata_file += f'Description-Content-Type: {metadata.core.readme_content_type}\n' + metadata_file += f'\n{metadata.core.readme}' + + 
return metadata_file + + +def construct_metadata_file_2_2(metadata: ProjectMetadata, extra_dependencies: tuple[str] | None = None) -> str: + """ + https://peps.python.org/pep-0643/ + """ + metadata_file = 'Metadata-Version: 2.2\n' + metadata_file += f'Name: {metadata.core.raw_name}\n' + metadata_file += f'Version: {metadata.version}\n' + + if metadata.core.description: + metadata_file += f'Summary: {metadata.core.description}\n' + + if metadata.core.urls: + for label, url in metadata.core.urls.items(): + metadata_file += f'Project-URL: {label}, {url}\n' + + authors_data = metadata.core.authors_data + if authors_data['name']: + metadata_file += f"Author: {', '.join(authors_data['name'])}\n" + if authors_data['email']: + metadata_file += f"Author-email: {', '.join(authors_data['email'])}\n" + + maintainers_data = metadata.core.maintainers_data + if maintainers_data['name']: + metadata_file += f"Maintainer: {', '.join(maintainers_data['name'])}\n" + if maintainers_data['email']: + metadata_file += f"Maintainer-email: {', '.join(maintainers_data['email'])}\n" + + if metadata.core.license: + license_start = 'License: ' + indent = ' ' * (len(license_start) - 1) + metadata_file += license_start + + for i, line in enumerate(metadata.core.license.splitlines()): + if i == 0: + metadata_file += f'{line}\n' + else: + metadata_file += f'{indent}{line}\n' + + if metadata.core.license_expression: + metadata_file += f'License-Expression: {metadata.core.license_expression}\n' + + if metadata.core.license_files: + for license_file in metadata.core.license_files: + metadata_file += f'License-File: {license_file}\n' + + if metadata.core.keywords: + metadata_file += f"Keywords: {','.join(metadata.core.keywords)}\n" + + if metadata.core.classifiers: + for classifier in metadata.core.classifiers: + metadata_file += f'Classifier: {classifier}\n' + + if metadata.core.requires_python: + metadata_file += f'Requires-Python: {metadata.core.requires_python}\n' + + if metadata.core.dependencies: 
+ for dependency in metadata.core.dependencies: + metadata_file += f'Requires-Dist: {dependency}\n' + + if extra_dependencies: + for dependency in extra_dependencies: + metadata_file += f'Requires-Dist: {dependency}\n' + + if metadata.core.optional_dependencies: + for option, dependencies in metadata.core.optional_dependencies.items(): + metadata_file += f'Provides-Extra: {option}\n' + for dependency in dependencies: + if ';' in dependency: + dep_name, dep_env_marker = dependency.split(';', maxsplit=1) + metadata_file += f'Requires-Dist: {dep_name}; ({dep_env_marker.strip()}) and extra == {option!r}\n' + elif '@ ' in dependency: + metadata_file += f'Requires-Dist: {dependency} ; extra == {option!r}\n' + else: + metadata_file += f'Requires-Dist: {dependency}; extra == {option!r}\n' + + if metadata.core.readme: + metadata_file += f'Description-Content-Type: {metadata.core.readme_content_type}\n' + metadata_file += f'\n{metadata.core.readme}' + + return metadata_file + + +def construct_metadata_file_2_3(metadata: ProjectMetadata, extra_dependencies: tuple[str] | None = None) -> str: + """ + https://peps.python.org/pep-0639/ + """ + metadata_file = 'Metadata-Version: 2.3\n' + metadata_file += f'Name: {metadata.core.raw_name}\n' + metadata_file += f'Version: {metadata.version}\n' + + if metadata.core.description: + metadata_file += f'Summary: {metadata.core.description}\n' + + if metadata.core.urls: + for label, url in metadata.core.urls.items(): + metadata_file += f'Project-URL: {label}, {url}\n' + + authors_data = metadata.core.authors_data + if authors_data['name']: + metadata_file += f"Author: {', '.join(authors_data['name'])}\n" + if authors_data['email']: + metadata_file += f"Author-email: {', '.join(authors_data['email'])}\n" + + maintainers_data = metadata.core.maintainers_data + if maintainers_data['name']: + metadata_file += f"Maintainer: {', '.join(maintainers_data['name'])}\n" + if maintainers_data['email']: + metadata_file += f"Maintainer-email: {', 
'.join(maintainers_data['email'])}\n" + + if metadata.core.license_expression: + metadata_file += f'License-Expression: {metadata.core.license_expression}\n' + + if metadata.core.license_files: + for license_file in metadata.core.license_files: + metadata_file += f'License-File: {license_file}\n' + + if metadata.core.keywords: + metadata_file += f"Keywords: {','.join(metadata.core.keywords)}\n" + + if metadata.core.classifiers: + for classifier in metadata.core.classifiers: + metadata_file += f'Classifier: {classifier}\n' + + if metadata.core.requires_python: + metadata_file += f'Requires-Python: {metadata.core.requires_python}\n' + + if metadata.core.dependencies: + for dependency in metadata.core.dependencies: + metadata_file += f'Requires-Dist: {dependency}\n' + + if extra_dependencies: + for dependency in extra_dependencies: + metadata_file += f'Requires-Dist: {dependency}\n' + + if metadata.core.optional_dependencies: + for option, dependencies in metadata.core.optional_dependencies.items(): + metadata_file += f'Provides-Extra: {option}\n' + for dependency in dependencies: + if ';' in dependency: + dep_name, dep_env_marker = dependency.split(';', maxsplit=1) + metadata_file += f'Requires-Dist: {dep_name}; ({dep_env_marker.strip()}) and extra == {option!r}\n' + elif '@ ' in dependency: + metadata_file += f'Requires-Dist: {dependency} ; extra == {option!r}\n' + else: + metadata_file += f'Requires-Dist: {dependency}; extra == {option!r}\n' + + if metadata.core.readme: + metadata_file += f'Description-Content-Type: {metadata.core.readme_content_type}\n' + metadata_file += f'\n{metadata.core.readme}' + + return metadata_file diff --git a/src/hatchling/metadata/utils.py b/src/hatchling/metadata/utils.py new file mode 100644 index 0000000..f1d7b00 --- /dev/null +++ b/src/hatchling/metadata/utils.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +import re +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from packaging.requirements import 
Requirement + + from hatchling.metadata.core import ProjectMetadata + +# NOTE: this module should rarely be changed because it is likely to be used by other packages like Hatch + + +def is_valid_project_name(project_name: str) -> bool: + # https://peps.python.org/pep-0508/#names + return re.search('^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$', project_name, re.IGNORECASE) is not None + + +def normalize_project_name(project_name: str) -> str: + # https://peps.python.org/pep-0503/#normalized-names + return re.sub(r'[-_.]+', '-', project_name).lower() + + +def get_normalized_dependency(requirement: Requirement) -> str: + # Changes to this function affect reproducibility between versions + from packaging.specifiers import SpecifierSet + + requirement.name = normalize_project_name(requirement.name) + + if requirement.specifier: + requirement.specifier = SpecifierSet(str(requirement.specifier).lower()) + + if requirement.extras: + requirement.extras = {normalize_project_name(extra) for extra in requirement.extras} + + # All TOML writers use double quotes, so allow direct writing or copy/pasting to avoid escaping + return str(requirement).replace('"', "'") + + +def resolve_metadata_fields(metadata: ProjectMetadata) -> dict[str, Any]: + # https://packaging.python.org/en/latest/specifications/declaring-project-metadata/ + return { + 'name': metadata.core.name, + 'version': metadata.version, + 'description': metadata.core.description, + 'readme': {'content-type': metadata.core.readme_content_type, 'text': metadata.core.readme}, + 'requires-python': metadata.core.requires_python, + 'license': metadata.core.license_expression or metadata.core.license, + 'authors': metadata.core.authors, + 'maintainers': metadata.core.maintainers, + 'keywords': metadata.core.keywords, + 'classifiers': metadata.core.classifiers, + 'urls': metadata.core.urls, + 'scripts': metadata.core.scripts, + 'gui-scripts': metadata.core.gui_scripts, + 'entry-points': metadata.core.entry_points, + 
'dependencies': metadata.core.dependencies, + 'optional-dependencies': metadata.core.optional_dependencies, + } diff --git a/src/hatchling/ouroboros.py b/src/hatchling/ouroboros.py new file mode 100644 index 0000000..b7b286d --- /dev/null +++ b/src/hatchling/ouroboros.py @@ -0,0 +1,50 @@ +from __future__ import annotations + +import os +import re +from ast import literal_eval +from typing import Any + +from hatchling.build import * # noqa: F403 + + +def read_dependencies() -> list[str]: + pattern = r'^dependencies = (\[.*?\])$' + + with open(os.path.join(os.getcwd(), 'pyproject.toml'), encoding='utf-8') as f: + # Windows \r\n prevents match + contents = '\n'.join(line.rstrip() for line in f) + + match = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL) + if match is None: + message = 'No dependencies found' + raise ValueError(message) + + return literal_eval(match.group(1)) + + +def get_requires_for_build_sdist( # type: ignore[no-redef] + config_settings: dict[str, Any] | None = None, # noqa: ARG001 +) -> list[str]: + """ + https://peps.python.org/pep-0517/#get-requires-for-build-sdist + """ + return read_dependencies() + + +def get_requires_for_build_wheel( # type: ignore[no-redef] + config_settings: dict[str, Any] | None = None, # noqa: ARG001 +) -> list[str]: + """ + https://peps.python.org/pep-0517/#get-requires-for-build-wheel + """ + return read_dependencies() + + +def get_requires_for_build_editable( # type: ignore[no-redef] + config_settings: dict[str, Any] | None = None, # noqa: ARG001 +) -> list[str]: + """ + https://peps.python.org/pep-0660/#get-requires-for-build-editable + """ + return read_dependencies() diff --git a/src/hatchling/plugin/__init__.py b/src/hatchling/plugin/__init__.py new file mode 100644 index 0000000..07fdce4 --- /dev/null +++ b/src/hatchling/plugin/__init__.py @@ -0,0 +1,3 @@ +import pluggy + +hookimpl = pluggy.HookimplMarker('hatch') diff --git a/src/hatchling/plugin/exceptions.py b/src/hatchling/plugin/exceptions.py new 
file mode 100644 index 0000000..03c3088 --- /dev/null +++ b/src/hatchling/plugin/exceptions.py @@ -0,0 +1,2 @@ +class UnknownPluginError(ValueError): + pass diff --git a/src/hatchling/plugin/manager.py b/src/hatchling/plugin/manager.py new file mode 100644 index 0000000..70773cc --- /dev/null +++ b/src/hatchling/plugin/manager.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +from typing import Callable, TypeVar + +import pluggy + + +class PluginManager: + def __init__(self) -> None: + self.manager = pluggy.PluginManager('hatch') + self.third_party_plugins = ThirdPartyPlugins(self.manager) + self.initialized = False + + def initialize(self) -> None: + from hatchling.plugin import specs + + self.manager.add_hookspecs(specs) + + def __getattr__(self, name: str) -> ClassRegister: + if not self.initialized: + self.initialize() + self.initialized = True + + hook_name = f'hatch_register_{name}' + hook = getattr(self, hook_name, None) + if hook: + hook() + + register = ClassRegister(getattr(self.manager.hook, hook_name), 'PLUGIN_NAME', self.third_party_plugins) + setattr(self, name, register) + return register + + def hatch_register_version_source(self) -> None: + from hatchling.version.source.plugin import hooks + + self.manager.register(hooks) + + def hatch_register_version_scheme(self) -> None: + from hatchling.version.scheme.plugin import hooks + + self.manager.register(hooks) + + def hatch_register_builder(self) -> None: + from hatchling.builders.plugin import hooks + + self.manager.register(hooks) + + def hatch_register_build_hook(self) -> None: + from hatchling.builders.hooks.plugin import hooks + + self.manager.register(hooks) + + def hatch_register_metadata_hook(self) -> None: + from hatchling.metadata.plugin import hooks + + self.manager.register(hooks) + + +class ClassRegister: + def __init__(self, registration_method: Callable, identifier: str, third_party_plugins: ThirdPartyPlugins) -> None: + self.registration_method = registration_method + 
self.identifier = identifier + self.third_party_plugins = third_party_plugins + + def collect(self, *, include_third_party: bool = True) -> dict: + if include_third_party and not self.third_party_plugins.loaded: + self.third_party_plugins.load() + + classes: dict[str, type] = {} + + for raw_registered_classes in self.registration_method(): + registered_classes = ( + raw_registered_classes if isinstance(raw_registered_classes, list) else [raw_registered_classes] + ) + for registered_class in registered_classes: + name = getattr(registered_class, self.identifier, None) + if not name: # no cov + message = f'Class `{registered_class.__name__}` does not have a {name} attribute.' + raise ValueError(message) + + if name in classes: # no cov + message = ( + f'Class `{registered_class.__name__}` defines its name as `{name}` but ' + f'that name is already used by `{classes[name].__name__}`.' + ) + raise ValueError(message) + + classes[name] = registered_class + + return classes + + def get(self, name: str) -> type | None: + if not self.third_party_plugins.loaded: + classes = self.collect(include_third_party=False) + if name in classes: + return classes[name] + + return self.collect().get(name) + + +class ThirdPartyPlugins: + def __init__(self, manager: pluggy.PluginManager) -> None: + self.manager = manager + self.loaded = False + + def load(self) -> None: + self.manager.load_setuptools_entrypoints('hatch') + self.loaded = True + + +PluginManagerBound = TypeVar('PluginManagerBound', bound=PluginManager) diff --git a/src/hatchling/plugin/specs.py b/src/hatchling/plugin/specs.py new file mode 100644 index 0000000..0fd12d2 --- /dev/null +++ b/src/hatchling/plugin/specs.py @@ -0,0 +1,23 @@ +import pluggy + +hookspec = pluggy.HookspecMarker('hatch') + + +@hookspec +def hatch_register_version_source() -> None: + """Register new classes that adhere to the version source interface.""" + + +@hookspec +def hatch_register_builder() -> None: + """Register new classes that adhere to the 
builder interface.""" + + +@hookspec +def hatch_register_build_hook() -> None: + """Register new classes that adhere to the build hook interface.""" + + +@hookspec +def hatch_register_metadata_hook() -> None: + """Register new classes that adhere to the metadata hook interface.""" diff --git a/src/hatchling/plugin/utils.py b/src/hatchling/plugin/utils.py new file mode 100644 index 0000000..2965f18 --- /dev/null +++ b/src/hatchling/plugin/utils.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, TypeVar + +if TYPE_CHECKING: + from hatchling.builders.hooks.plugin.interface import BuildHookInterface + from hatchling.builders.plugin.interface import BuilderInterface + from hatchling.metadata.plugin.interface import MetadataHookInterface + + T = TypeVar('T', BuilderInterface, BuildHookInterface, MetadataHookInterface) + + +def load_plugin_from_script(path: str, script_name: str, plugin_class: type[T], plugin_id: str) -> type[T]: + import importlib + + spec = importlib.util.spec_from_file_location(script_name, path) # type: ignore + module = importlib.util.module_from_spec(spec) # type: ignore + spec.loader.exec_module(module) + + plugin_finder = f'get_{plugin_id}' + names = dir(module) + if plugin_finder in names: + return getattr(module, plugin_finder)() + + subclasses = [] + for name in names: + obj = getattr(module, name) + if obj is plugin_class: + continue + + try: + if issubclass(obj, plugin_class): + subclasses.append(obj) + except TypeError: + continue + + if not subclasses: + message = f'Unable to find a subclass of `{plugin_class.__name__}` in `{script_name}`: {path}' + raise ValueError(message) + + if len(subclasses) > 1: + message = ( + f'Multiple subclasses of `{plugin_class.__name__}` found in `{script_name}`, ' + f'select one by defining a function named `{plugin_finder}`: {path}' + ) + raise ValueError(message) + + return subclasses[0] diff --git a/src/hatchling/py.typed b/src/hatchling/py.typed new file mode 
100644 index 0000000..e69de29 diff --git a/src/hatchling/utils/__init__.py b/src/hatchling/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/hatchling/utils/constants.py b/src/hatchling/utils/constants.py new file mode 100644 index 0000000..5c3c4e2 --- /dev/null +++ b/src/hatchling/utils/constants.py @@ -0,0 +1,2 @@ +DEFAULT_BUILD_SCRIPT = 'hatch_build.py' +DEFAULT_CONFIG_FILE = 'hatch.toml' diff --git a/src/hatchling/utils/context.py b/src/hatchling/utils/context.py new file mode 100644 index 0000000..3006c06 --- /dev/null +++ b/src/hatchling/utils/context.py @@ -0,0 +1,170 @@ +from __future__ import annotations + +import os +import string +from abc import ABC, abstractmethod +from collections import ChainMap +from contextlib import contextmanager +from typing import Any, Iterable, Iterator, Mapping, MutableMapping, Sequence + +from hatchling.utils.fs import path_to_uri + + +class ContextFormatter(ABC): + @abstractmethod + def get_formatters(self) -> MutableMapping: + """ + This returns a mapping of supported field names to their respective formatting functions. 
Each function + accepts 2 arguments: + + - the `value` that was passed to the format call, defaulting to `None` + - the modifier `data`, defaulting to an empty string + """ + + @classmethod + def format_path(cls, path: str, modifier: str) -> str: + if not modifier: + return os.path.normpath(path) + + modifiers = modifier.split(':')[::-1] + while modifiers and modifiers[-1] == 'parent': + path = os.path.dirname(path) + modifiers.pop() + + if not modifiers: + return path + + if len(modifiers) > 1: + message = f'Expected a single path modifier and instead got: {", ".join(reversed(modifiers))}' + raise ValueError(message) + + modifier = modifiers[0] + if modifier == 'uri': + return path_to_uri(path) + + if modifier == 'real': + return os.path.realpath(path) + + message = f'Unknown path modifier: {modifier}' + raise ValueError(message) + + +class DefaultContextFormatter(ContextFormatter): + CONTEXT_NAME = 'default' + + def __init__(self, root: str) -> None: + self.__root = root + + def get_formatters(self) -> MutableMapping: + return { + '/': self.__format_directory_separator, + ';': self.__format_path_separator, + 'env': self.__format_env, + 'home': self.__format_home, + 'root': self.__format_root, + } + + def __format_directory_separator(self, value: str, data: str) -> str: # noqa: ARG002, PLR6301 + return os.sep + + def __format_path_separator(self, value: str, data: str) -> str: # noqa: ARG002, PLR6301 + return os.pathsep + + def __format_root(self, value: str, data: str) -> str: # noqa: ARG002 + return self.format_path(self.__root, data) + + def __format_home(self, value: str, data: str) -> str: # noqa: ARG002 + return self.format_path(os.path.expanduser('~'), data) + + def __format_env(self, value: str, data: str) -> str: # noqa: ARG002, PLR6301 + if not data: + message = 'The `env` context formatting field requires a modifier' + raise ValueError(message) + + env_var, separator, default = data.partition(':') + if env_var in os.environ: + return os.environ[env_var] 
+ + if not separator: + message = f'Nonexistent environment variable must set a default: {env_var}' + raise ValueError(message) + + return default + + +class Context: + def __init__(self, root: str) -> None: + self.__root = str(root) + + # Allow callers to define their own formatters with precedence + self.__formatters: ChainMap = ChainMap() + self.__configured_contexts: set[str] = set() + self.__formatter = ContextStringFormatter(self.__formatters) + + self.add_context(DefaultContextFormatter(self.__root)) + + def format(self, *args: Any, **kwargs: Any) -> str: # noqa: A003 + return self.__formatter.format(*args, **kwargs) + + def add_context(self, context: DefaultContextFormatter) -> None: + if context.CONTEXT_NAME in self.__configured_contexts: + return + + self.__add_formatters(context.get_formatters()) + self.__configured_contexts.add(context.CONTEXT_NAME) + + @contextmanager + def apply_context(self, context: DefaultContextFormatter) -> Iterator: + self.__add_formatters(context.get_formatters()) + try: + yield + finally: + self.__remove_formatters() + + def __add_formatters(self, formatters: MutableMapping) -> None: + return self.__formatters.maps.insert(0, formatters) + + def __remove_formatters(self) -> None: + if len(self.__formatters.maps) > 1: + self.__formatters.maps.pop(0) + + +class ContextStringFormatter(string.Formatter): + def __init__(self, formatters: ChainMap) -> None: + super().__init__() + + self.__formatters = formatters + + def vformat(self, format_string: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> str: + # We override to increase the recursion limit from 2 to 10 + # + # TODO: remove type ignore after https://github.com/python/typeshed/pull/9228 + used_args = set() # type: ignore[var-annotated] + result, _ = self._vformat(format_string, args, kwargs, used_args, 10) + self.check_unused_args(used_args, args, kwargs) + return result + + def get_value(self, key: int | str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: + if 
key in self.__formatters: + # Avoid hard look-up and rely on `None` to indicate that the field is undefined + return kwargs.get(str(key)) + + try: + return super().get_value(key, args, kwargs) + except KeyError: + message = f'Unknown context field `{key}`' + raise ValueError(message) from None + + def format_field(self, value: Any, format_spec: str) -> Any: + formatter, _, data = format_spec.partition(':') + if formatter in self.__formatters: + return self.__formatters[formatter](value, data) + + return super().format_field(value, format_spec) + + def parse(self, format_string: str) -> Iterable: + for literal_text, field_name, format_spec, conversion in super().parse(format_string): + if field_name in self.__formatters: + yield literal_text, field_name, f'{field_name}:{format_spec}', conversion + else: + yield literal_text, field_name, format_spec, conversion diff --git a/src/hatchling/utils/fs.py b/src/hatchling/utils/fs.py new file mode 100644 index 0000000..21c93e2 --- /dev/null +++ b/src/hatchling/utils/fs.py @@ -0,0 +1,23 @@ +from __future__ import annotations + +import os + + +def locate_file(root: str, file_name: str) -> str | None: + while True: + file_path = os.path.join(root, file_name) + if os.path.isfile(file_path): + return file_path + + new_root = os.path.dirname(root) + if new_root == root: + return None + + root = new_root + + +def path_to_uri(path: str) -> str: + if os.sep == '/': + return f'file://{os.path.abspath(path).replace(" ", "%20")}' + + return f'file:///{os.path.abspath(path).replace(" ", "%20").replace(os.sep, "/")}' diff --git a/src/hatchling/version/__init__.py b/src/hatchling/version/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/hatchling/version/core.py b/src/hatchling/version/core.py new file mode 100644 index 0000000..9b1756f --- /dev/null +++ b/src/hatchling/version/core.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +import os +import re + +DEFAULT_PATTERN = r'(?i)^(__version__|VERSION) *= 
*([\'"])v?(?P.+?)\2' +DEFAULT_TEMPLATE = """\ +# This file is auto-generated by Hatchling. As such, do not: +# - modify +# - track in version control e.g. be sure to add to .gitignore +__version__ = VERSION = {version!r} +""" + + +class VersionFile: + def __init__(self, root: str, relative_path: str) -> None: + self.__relative_path = relative_path + self.__path = os.path.normpath(os.path.join(root, relative_path)) + self.__cached_read_data: tuple | None = None + + def read(self, *, pattern: str | bool) -> str: + if not os.path.isfile(self.__path): + message = f'file does not exist: {self.__relative_path}' + raise OSError(message) + + with open(self.__path, encoding='utf-8') as f: + contents = f.read() + + if not pattern or pattern is True: + pattern = DEFAULT_PATTERN + + match = re.search(pattern, contents, flags=re.MULTILINE) + if not match: + message = f'unable to parse the version from the file: {self.__relative_path}' + raise ValueError(message) + + groups = match.groupdict() + if 'version' not in groups: + message = 'no group named `version` was defined in the pattern' + raise ValueError(message) + + self.__cached_read_data = groups['version'], contents, match.span('version') + return self.__cached_read_data[0] + + def set_version(self, version: str) -> None: + _old_version, file_contents, (start, end) = self.__cached_read_data # type: ignore + with open(self.__path, 'w', encoding='utf-8') as f: + f.write(f'{file_contents[:start]}{version}{file_contents[end:]}') + + def write(self, version: str, template: str = DEFAULT_TEMPLATE) -> None: + template = template or DEFAULT_TEMPLATE + + parent_dir = os.path.dirname(self.__path) + if not os.path.isdir(parent_dir): + os.makedirs(parent_dir) + + with open(self.__path, 'w', encoding='utf-8') as f: + f.write(template.format(version=version)) diff --git a/src/hatchling/version/scheme/__init__.py b/src/hatchling/version/scheme/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/src/hatchling/version/scheme/plugin/__init__.py b/src/hatchling/version/scheme/plugin/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/hatchling/version/scheme/plugin/hooks.py b/src/hatchling/version/scheme/plugin/hooks.py new file mode 100644 index 0000000..d36a323 --- /dev/null +++ b/src/hatchling/version/scheme/plugin/hooks.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from hatchling.plugin import hookimpl +from hatchling.version.scheme.standard import StandardScheme + +if TYPE_CHECKING: + from hatchling.version.scheme.plugin.interface import VersionSchemeInterface + + +@hookimpl +def hatch_register_version_scheme() -> type[VersionSchemeInterface]: + return StandardScheme diff --git a/src/hatchling/version/scheme/plugin/interface.py b/src/hatchling/version/scheme/plugin/interface.py new file mode 100644 index 0000000..0e38b15 --- /dev/null +++ b/src/hatchling/version/scheme/plugin/interface.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod + + +class VersionSchemeInterface(ABC): # no cov + """ + Example usage: + + ```python tab="plugin.py" + from hatchling.version.scheme.plugin.interface import VersionSchemeInterface + + + class SpecialVersionScheme(VersionSchemeInterface): + PLUGIN_NAME = 'special' + ... + ``` + + ```python tab="hooks.py" + from hatchling.plugin import hookimpl + + from .plugin import SpecialVersionScheme + + + @hookimpl + def hatch_register_version_scheme(): + return SpecialVersionScheme + ``` + """ + + PLUGIN_NAME = '' + """The name used for selection.""" + + def __init__(self, root: str, config: dict) -> None: + self.__root = root + self.__config = config + + @property + def root(self) -> str: + """ + The root of the project tree as a string. 
+ """ + return self.__root + + @property + def config(self) -> dict: + """ + ```toml config-example + [tool.hatch.version] + ``` + """ + return self.__config + + @abstractmethod + def update(self, desired_version: str, original_version: str, version_data: dict) -> str: + """ + This should return a normalized form of the desired version and verify that it + is higher than the original version. + """ diff --git a/src/hatchling/version/scheme/standard.py b/src/hatchling/version/scheme/standard.py new file mode 100644 index 0000000..e5e2f28 --- /dev/null +++ b/src/hatchling/version/scheme/standard.py @@ -0,0 +1,98 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Tuple, cast + +from hatchling.version.scheme.plugin.interface import VersionSchemeInterface + +if TYPE_CHECKING: + from packaging.version import Version + + +class StandardScheme(VersionSchemeInterface): + """ + See https://peps.python.org/pep-0440/ + """ + + PLUGIN_NAME = 'standard' + + def update( + self, + desired_version: str, + original_version: str, + version_data: dict, # noqa: ARG002 + ) -> str: + from packaging.version import Version + + original = Version(original_version) + versions = desired_version.split(',') + + for version in versions: + if version == 'release': + reset_version_parts(original, release=original.release) + elif version == 'major': + reset_version_parts(original, release=update_release(original, [original.major + 1])) + elif version == 'minor': + reset_version_parts(original, release=update_release(original, [original.major, original.minor + 1])) + elif version in {'micro', 'patch', 'fix'}: + reset_version_parts( + original, release=update_release(original, [original.major, original.minor, original.micro + 1]) + ) + elif version in {'a', 'b', 'c', 'rc', 'alpha', 'beta', 'pre', 'preview'}: + phase, number = parse_letter_version(version, 0) + if original.pre: + current_phase, current_number = parse_letter_version(*original.pre) + if phase == 
current_phase: + number = current_number + 1 + + reset_version_parts(original, pre=(phase, number)) + elif version in {'post', 'rev', 'r'}: + number = 0 if original.post is None else original.post + 1 + reset_version_parts(original, post=parse_letter_version(version, number)) + elif version == 'dev': + number = 0 if original.dev is None else original.dev + 1 + reset_version_parts(original, dev=(version, number)) + else: + if len(versions) > 1: + message = 'Cannot specify multiple update operations with an explicit version' + raise ValueError(message) + + next_version = Version(version) + if self.config.get('validate-bump', True) and next_version <= original: + message = f'Version `{version}` is not higher than the original version `{original_version}`' + raise ValueError(message) + + return str(next_version) + + return str(original) + + +def reset_version_parts(version: Version, **kwargs: Any) -> None: + # https://github.com/pypa/packaging/blob/20.9/packaging/version.py#L301-L310 + internal_version = version._version # noqa: SLF001 + parts: dict[str, Any] = {} + ordered_part_names = ('epoch', 'release', 'pre', 'post', 'dev', 'local') + + reset = False + for part_name in ordered_part_names: + if reset: + parts[part_name] = kwargs.get(part_name) + elif part_name in kwargs: + parts[part_name] = kwargs[part_name] + reset = True + else: + parts[part_name] = getattr(internal_version, part_name) + + version._version = type(internal_version)(**parts) # noqa: SLF001 + + +def update_release(original_version: Version, new_release_parts: list[int]) -> tuple[int, ...]: + # Retain release length + new_release_parts.extend(0 for _ in range(len(original_version.release) - len(new_release_parts))) + + return tuple(new_release_parts) + + +def parse_letter_version(*args: Any, **kwargs: Any) -> tuple[str, int]: + from packaging.version import _parse_letter_version + + return cast(Tuple[str, int], _parse_letter_version(*args, **kwargs)) diff --git 
class CodeSource(VersionSourceInterface):
    """Version source that imports a Python file and evaluates an expression in it."""

    PLUGIN_NAME = 'code'

    def get_version_data(self) -> dict:
        """
        Load the configured `path` as a module — with optional `search-paths`
        temporarily prepended to `sys.path` — then evaluate `expression`
        (default `__version__`) in the module's namespace to obtain the version.

        Raises ValueError/TypeError for invalid options and OSError when the
        file does not exist.
        """
        # FIX: `import importlib` alone does not guarantee the `util` submodule
        # is bound; import it explicitly so `importlib.util.*` is always available.
        import importlib.util
        import sys

        relative_path = self.config.get('path')
        if not relative_path:
            message = 'option `path` must be specified'
            raise ValueError(message)

        if not isinstance(relative_path, str):
            message = 'option `path` must be a string'
            raise TypeError(message)

        path = os.path.normpath(os.path.join(self.root, relative_path))
        if not os.path.isfile(path):
            message = f'file does not exist: {relative_path}'
            raise OSError(message)

        expression = self.config.get('expression') or '__version__'
        if not isinstance(expression, str):
            message = 'option `expression` must be a string'
            raise TypeError(message)

        search_paths = self.config.get('search-paths', [])
        if not isinstance(search_paths, list):
            message = 'option `search-paths` must be an array'
            raise TypeError(message)

        absolute_search_paths = []
        for i, search_path in enumerate(search_paths, 1):
            if not isinstance(search_path, str):
                message = f'entry #{i} of option `search-paths` must be a string'
                raise TypeError(message)

            absolute_search_paths.append(os.path.normpath(os.path.join(self.root, search_path)))

        spec = importlib.util.spec_from_file_location(os.path.splitext(path)[0], path)  # type: ignore
        module = importlib.util.module_from_spec(spec)  # type: ignore

        # Temporarily prepend the search paths so the loaded file can resolve
        # its own imports; always restore `sys.path` afterwards.
        old_search_paths = list(sys.path)
        try:
            sys.path[:] = [*absolute_search_paths, *old_search_paths]
            spec.loader.exec_module(module)
        finally:
            sys.path[:] = old_search_paths

        # Execute the expression to determine the version.
        # NOTE(review): `eval` executes arbitrary project code; acceptable for a
        # build backend (the build already runs project code), but never point
        # this at untrusted input.
        version = eval(expression, vars(module))  # noqa: PGH001, S307

        return {'version': version}

    def set_version(self, version: str, version_data: dict) -> None:  # noqa: ARG002, PLR6301
        """Unsupported: a version computed by executing code cannot be rewritten."""
        message = 'Cannot rewrite loaded code'
        raise NotImplementedError(message)
hatchling.version.source.env import EnvSource +from hatchling.version.source.regex import RegexSource + +if TYPE_CHECKING: + from hatchling.version.source.plugin.interface import VersionSourceInterface + + +@hookimpl +def hatch_register_version_source() -> list[type[VersionSourceInterface]]: + return [CodeSource, EnvSource, RegexSource] diff --git a/src/hatchling/version/source/plugin/interface.py b/src/hatchling/version/source/plugin/interface.py new file mode 100644 index 0000000..4f069eb --- /dev/null +++ b/src/hatchling/version/source/plugin/interface.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod + + +class VersionSourceInterface(ABC): # no cov + """ + Example usage: + + ```python tab="plugin.py" + from hatchling.version.source.plugin.interface import VersionSourceInterface + + + class SpecialVersionSource(VersionSourceInterface): + PLUGIN_NAME = 'special' + ... + ``` + + ```python tab="hooks.py" + from hatchling.plugin import hookimpl + + from .plugin import SpecialVersionSource + + + @hookimpl + def hatch_register_version_source(): + return SpecialVersionSource + ``` + """ + + PLUGIN_NAME = '' + """The name used for selection.""" + + def __init__(self, root: str, config: dict) -> None: + self.__root = root + self.__config = config + + @property + def root(self) -> str: + """ + The root of the project tree as a string. + """ + return self.__root + + @property + def config(self) -> dict: + """ + ```toml config-example + [tool.hatch.version] + ``` + """ + return self.__config + + @abstractmethod + def get_version_data(self) -> dict: + """ + This should return a mapping with a `version` key representing the current version of the project and will be + displayed when invoking the [`version`](../../cli/reference.md#hatch-version) command without any arguments. 
from hatchling.version.core import VersionFile
from hatchling.version.source.plugin.interface import VersionSourceInterface


class RegexSource(VersionSourceInterface):
    """Version source that extracts the version from a file via a regular expression."""

    PLUGIN_NAME = 'regex'

    def get_version_data(self) -> dict:
        """Read the version from the configured file; the handle is kept for `set_version`."""
        path_option = self.config.get('path', '')
        if not path_option:
            message = 'option `path` must be specified'
            raise ValueError(message)

        if not isinstance(path_option, str):
            message = 'option `path` must be a string'
            raise TypeError(message)

        pattern_option = self.config.get('pattern', '')
        if not isinstance(pattern_option, str):
            message = 'option `pattern` must be a string'
            raise TypeError(message)

        handle = VersionFile(self.root, path_option)
        return {'version': handle.read(pattern=pattern_option), 'version_file': handle}

    def set_version(self, version: str, version_data: dict) -> None:  # noqa: PLR6301
        """Splice the new version into the file through the cached `VersionFile`."""
        version_data['version_file'].set_version(version)
diff --git a/tests/downstream/datadogpy/pyproject.toml b/tests/downstream/datadogpy/pyproject.toml new file mode 100644 index 0000000..ce79adb --- /dev/null +++ b/tests/downstream/datadogpy/pyproject.toml @@ -0,0 +1,58 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "datadog" +description = "The Datadog Python library" +readme = "README.md" +license = "BSD-3-Clause" +keywords = [ + "datadog", +] +authors = [ + { name = "Datadog, Inc.", email = "dev@datadoghq.com" }, +] +classifiers = [ + "Operating System :: OS Independent", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + 'Programming Language :: Python :: Implementation :: CPython', + "Programming Language :: Python :: Implementation :: PyPy", +] +dependencies = [ + "requests>=2.6.0", + "typing; python_version<'3.5'", + "configparser<5; python_version<'3.0'", +] +dynamic = ["version"] + +[project.urls] +"Bug Tracker" = "https://github.com/DataDog/datadogpy/issues" +Documentation = "https://datadogpy.readthedocs.io/en/latest/" +"Source Code" = "https://github.com/DataDog/datadogpy" + +[project.scripts] +dog = "datadog.dogshell:main" +dogwrap = "datadog.dogshell.wrap:main" +dogshell = "datadog.dogshell:main" +dogshellwrap = "datadog.dogshell.wrap:main" + +[tool.hatch.version] +path = "datadog/version.py" + +[tool.hatch.build] +packages = ["datadog"] + +[tool.hatch.build.targets.sdist] +include = [ + "/LICENSE", + "/tests", +] + +[tool.hatch.build.targets.wheel] diff --git a/tests/downstream/hatch-showcase/data.json b/tests/downstream/hatch-showcase/data.json new file mode 100644 index 0000000..a722b9a --- /dev/null +++ b/tests/downstream/hatch-showcase/data.json @@ -0,0 +1,9 @@ +{ + "repo_url": 
def handle_remove_readonly(func, path, exc):  # no cov
    """`shutil.rmtree` onerror hook: clear read-only bits and retry the failed call.

    Works around `PermissionError: [WinError 5] Access is denied: '...\\.git\\...'`.
    """
    retryable = func in {os.rmdir, os.remove, os.unlink}
    if retryable and exc[1].errno == errno.EACCES:
        os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
        func(path)
    else:
        raise exc


class EnvVars(dict):
    """Context manager that temporarily replaces `os.environ` with a modified copy."""

    def __init__(self, env_vars=None, ignore=None):
        # Start from the current environment and remember it for restoration.
        super().__init__(os.environ)
        self.old_env = dict(self)

        if env_vars is not None:
            self.update(env_vars)

        if ignore is not None:
            for name in ignore:
                self.pop(name, None)

    def __enter__(self):
        os.environ.clear()
        os.environ.update(self)

    def __exit__(self, exc_type, exc_value, traceback):
        os.environ.clear()
        os.environ.update(self.old_env)


def python_version_supported(project_config):
    """Return True when the running interpreter satisfies `project.requires-python`."""
    constraint = project_config['project'].get('requires-python', '')
    if not constraint:
        return True

    current = '.'.join(map(str, sys.version_info[:2]))
    return SpecifierSet(constraint).contains(current)
def get_venv_exe_dir(venv_dir):
    """Return the executables directory of the virtual environment at `venv_dir`.

    Candidate layouts are checked in order:
      1. standard: `Scripts` on Windows, `bin` elsewhere
      2. PyPy on Windows: `bin`
      3. Debian: `local/bin` (only when a `local` directory exists)

    Raises OSError when no candidate directory exists.
    """
    candidates = [os.path.join(venv_dir, 'Scripts' if ON_WINDOWS else 'bin')]

    if ON_WINDOWS:
        # PyPy layout
        candidates.append(os.path.join(venv_dir, 'bin'))
    elif os.path.isdir(os.path.join(venv_dir, 'local')):
        # Debian layout
        candidates.append(os.path.join(venv_dir, 'local', 'bin'))

    for exe_dir in candidates:
        if os.path.isdir(exe_dir):
            return exe_dir

    # FIX: single raise replaces three duplicated message/raise copies; the
    # message and exception type are unchanged from the original.
    message = f'Unable to locate executables directory within: {venv_dir}'
    raise OSError(message)
subprocess.check_call([sys.executable, '-m', 'build', '--wheel', '-o', links_dir, backend_path]) + subprocess.check_call([ + sys.executable, + '-m', + 'pip', + 'download', + '-q', + '--disable-pip-version-check', + '--no-python-version-warning', + '-d', + links_dir, + os.path.join(links_dir, os.listdir(links_dir)[0]), + ]) + + constraints = [] + constraints_file = os.path.join(build_dir, 'constraints.txt') + with open(constraints_file, 'w', encoding='utf-8') as f: + f.write('\n'.join(constraints)) + + for project in os.listdir(HERE): + project_dir = os.path.join(HERE, project) + if not os.path.isdir(project_dir): + continue + + print(f'<<<<< Project: {project} >>>>>') + project_config = {} + potential_project_file = os.path.join(project_dir, 'pyproject.toml') + + # Not yet ported + if os.path.isfile(potential_project_file): + with open(potential_project_file, encoding='utf-8') as f: + project_config.update(tomli.loads(f.read())) + + if not python_version_supported(project_config): + print('--> Unsupported version of Python, skipping') + continue + + with open(os.path.join(project_dir, 'data.json'), encoding='utf-8') as f: + test_data = json.loads(f.read()) + + with temp_dir() as d: + if 'repo_url' in test_data: + print('--> Cloning repository') + repo_dir = os.path.join(d, 'repo') + subprocess.check_call(['git', 'clone', '-q', '--depth', '1', test_data['repo_url'], repo_dir]) + else: + archive_name = f'{project}.zip' + archive_path = os.path.join(d, archive_name) + + print('--> Downloading archive') + download_file(test_data['archive_url'], archive_path) + with ZipFile(archive_path) as zip_file: + zip_file.extractall(d) + + entries = os.listdir(d) + entries.remove(archive_name) + repo_dir = os.path.join(d, entries[0]) + + project_file = os.path.join(repo_dir, 'pyproject.toml') + if project_config: + shutil.copyfile(potential_project_file, project_file) + else: + if not os.path.isfile(project_file): + sys.exit('--> Missing file: pyproject.toml') + + with 
open(project_file, encoding='utf-8') as f: + project_config.update(tomli.loads(f.read())) + + for requirement in project_config.get('build-system', {}).get('requires', []): + if Requirement(requirement).name == 'hatchling': + break + else: + sys.exit('--> Field `build-system.requires` must specify `hatchling` as a requirement') + + if not python_version_supported(project_config): + print('--> Unsupported version of Python, skipping') + continue + + for file_name in ('MANIFEST.in', 'setup.cfg', 'setup.py'): + possible_path = os.path.join(repo_dir, file_name) + if os.path.isfile(possible_path): + os.remove(possible_path) + + venv_dir = os.path.join(d, '.venv') + print('--> Creating virtual environment') + cli_run([venv_dir, '--no-download', '--no-periodic-update']) + + env_vars = dict(test_data.get('env_vars', {})) + env_vars['VIRTUAL_ENV'] = venv_dir + env_vars['PATH'] = f'{get_venv_exe_dir(venv_dir)}{os.pathsep}{os.environ["PATH"]}' + env_vars['PIP_CONSTRAINT'] = constraints_file + with EnvVars(env_vars, ignore=('__PYVENV_LAUNCHER__', 'PYTHONHOME')): + print('--> Installing project') + subprocess.check_call([ + shutil.which('pip'), + 'install', + '-q', + '--disable-pip-version-check', + '--no-python-version-warning', + '--find-links', + links_dir, + '--no-deps', + repo_dir, + ]) + + print('--> Installing dependencies') + subprocess.check_call([ + shutil.which('pip'), + 'install', + '-q', + '--disable-pip-version-check', + '--no-python-version-warning', + repo_dir, + ]) + + print('--> Testing package') + for statement in test_data['statements']: + subprocess.check_call([shutil.which('python'), '-c', statement]) + + scripts = project_config['project'].get('scripts', {}) + if scripts: + print('--> Testing scripts') + for script in scripts: + if not shutil.which(script): + sys.exit(f'--> Could not locate script: {script}') + + print('--> Success!') + + +if __name__ == '__main__': + main() diff --git a/tests/downstream/requirements.txt 
b/tests/downstream/requirements.txt new file mode 100644 index 0000000..b7f56dc --- /dev/null +++ b/tests/downstream/requirements.txt @@ -0,0 +1,5 @@ +build +packaging +requests +tomli +virtualenv>=20.13.1 -- 2.34.1