diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 51d22156e4..cbb10b8211 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 73.0.1
+current_version = 74.1.2
 commit = True
 tag = True
diff --git a/.coveragerc b/.coveragerc
index c8d1cbbd6e..be34df9d9d 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -7,6 +7,9 @@ omit =
 	*/_vendor/*
 	*/tools/*
 	*/setuptools/_distutils/*
+	# See #4588 for integration tests coverage
+	*/setuptools/tests/integration/*
+	*/setuptools/tests/test_integration.py

 disable_warnings =
 	couldnt-parse
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index b9ecc51412..f90a4607b1 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -43,7 +43,7 @@ jobs:
       matrix:
         python:
         - "3.8"
-        - "3.12"
+        - "3.13"
         platform:
         - ubuntu-latest
         - macos-latest
@@ -55,6 +55,10 @@ jobs:
           platform: ubuntu-latest
         - python: "3.11"
           platform: ubuntu-latest
+        - python: "3.12"
+          platform: ubuntu-latest
+        - python: "3.14"
+          platform: ubuntu-latest
         - python: pypy3.10
           platform: ubuntu-latest
          distutils: stdlib
@@ -68,7 +72,7 @@ jobs:
         exclude:
         - {python: "3.8", platform: "macos-latest"}
     runs-on: ${{ matrix.platform }}
-    continue-on-error: ${{ matrix.python == '3.13' }}
+    continue-on-error: ${{ matrix.python == '3.14' }}
     env:
       SETUPTOOLS_USE_DISTUTILS: ${{ matrix.distutils || 'local' }}
     timeout-minutes: 75
diff --git a/.github/workflows/pyright.yml b/.github/workflows/pyright.yml
index bb25f1ba82..38fb910d85 100644
--- a/.github/workflows/pyright.yml
+++ b/.github/workflows/pyright.yml
@@ -9,7 +9,7 @@ on:
     - gh-readonly-queue/**
     tags:
     # required if branches-ignore is supplied (jaraco/skeleton#103)
-    - '**'
+    - "**"

   pull_request:
   workflow_dispatch:
@@ -24,15 +24,17 @@ env:
   # pin pyright version so a new version doesn't suddenly cause the CI to fail,
   # until types-setuptools is removed from typeshed.
   # For help with static-typing issues, or pyright update, ping @Avasam
+  #
+  # An exact version from https://github.com/microsoft/pyright/releases or "latest"
   PYRIGHT_VERSION: "1.1.377"

   # Environment variable to support color support (jaraco/skeleton#66)
   FORCE_COLOR: 1

   # Suppress noisy pip warnings
-  PIP_DISABLE_PIP_VERSION_CHECK: 'true'
-  PIP_NO_PYTHON_VERSION_WARNING: 'true'
-  PIP_NO_WARN_SCRIPT_LOCATION: 'true'
+  PIP_DISABLE_PIP_VERSION_CHECK: "true"
+  PIP_NO_PYTHON_VERSION_WARNING: "true"
+  PIP_NO_WARN_SCRIPT_LOCATION: "true"

 jobs:
   pyright:
@@ -65,7 +67,7 @@ jobs:
           else
             echo '> pip install pyright==${{ env.PYRIGHT_VERSION }}'
           fi
-          echo 'pyright --threads'
+          echo '> pyright --threads'
         shell: bash
       - name: Run pyright
         uses: jakebailey/pyright-action@v2
diff --git a/MANIFEST.in b/MANIFEST.in
index cb6ce2300e..0643e7ee2d 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -19,4 +19,3 @@ include tox.ini
 include setuptools/tests/config/setupcfg_examples.txt
 include setuptools/config/*.schema.json
 global-exclude *.py[cod] __pycache__
-prune .tox
diff --git a/NEWS.rst b/NEWS.rst
index 3c36fbfa5a..7b62a76e0c 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -1,3 +1,66 @@
+v74.1.2
+=======
+
+Bugfixes
+--------
+
+- Fixed TypeError in sdist filelist processing by adding support for pathlib Paths for the build_base. (#4615)
+- Removed degraded and deprecated ``test_integration`` (easy_install) from the test suite. (#4632)
+
+
+v74.1.1
+=======
+
+Bugfixes
+--------
+
+- Fixed TypeError in ``msvc.EnvironmentInfo.return_env`` when no runtime redistributables are installed. (#1902)
+
+
+v74.1.0
+=======
+
+Features
+--------
+
+- Added support for defining ``ext-modules`` via ``pyproject.toml``
+  (**EXPERIMENTAL**, may change in future releases). (#4568)
+
+
+Bugfixes
+--------
+
+- Merge with pypa/distutils@3dcdf8567, removing the duplicate vendored copy of packaging. (#4622)
+- Restored ``setuptools.msvc.EnvironmentInfo`` as it is used externally. (#4625)
+
+
+v74.0.0
+=======
+
+Features
+--------
+
+- Changed the type of error raised by ``setuptools.command.easy_install.CommandSpec.from_param`` on unsupported argument from `AttributeError` to `TypeError` -- by :user:`Avasam` (#4548)
+- Added detection of ARM64 variant of MSVC -- by :user:`saschanaz` (#4553)
+- Made ``setuptools.package_index.Credential`` a `typing.NamedTuple` -- by :user:`Avasam` (#4585)
+- Reraise error from ``setuptools.command.easy_install.auto_chmod`` instead of nonsensical ``TypeError: 'Exception' object is not subscriptable`` -- by :user:`Avasam` (#4593)
+- Fully typed all collection attributes in ``pkg_resources`` -- by :user:`Avasam` (#4598)
+- Automatically exclude ``.tox|.nox|.venv`` directories from ``sdist``. (#4603)
+
+
+Deprecations and Removals
+-------------------------
+
+- Removed the monkeypatching of distutils._msvccompiler. Now all compiler logic is consolidated in distutils. (#4600)
+- Synced with pypa/distutils@58fe058e4, including consolidating Visual Studio 2017 support (#4600, pypa/distutils#289), removal of deprecated legacy MSVC compiler modules (pypa/distutils#287), suppression of errors when the home directory is missing (pypa/distutils#278), removal of wininst binaries (pypa/distutils#282). (#4606)
+
+
+Misc
+----
+
+- #4592
+
+
 v73.0.1
 =======
diff --git a/conftest.py b/conftest.py
index eb052d05ca..cb65dbc30e 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,6 +1,7 @@
 from __future__ import annotations

 import os
+import platform
 import sys

 import pytest
@@ -82,3 +83,9 @@ def conditional_skip(request):
             pytest.skip("running integration tests only")
         if not running_integration_tests and is_integration_test:
             pytest.skip("skipping integration tests")
+
+
+@pytest.fixture
+def windows_only():
+    if platform.system() != 'Windows':
+        pytest.skip("Windows only")
diff --git a/docs/conf.py b/docs/conf.py
index 0f85661789..4ea38e7490 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 extensions = [
     'sphinx.ext.autodoc',
     'jaraco.packaging.sphinx',
@@ -93,6 +95,7 @@

 # Be strict about any broken references
 nitpicky = True
+nitpick_ignore: list[tuple[str, str]] = []

 # Include Python intersphinx mapping to prevent failures
 # jaraco/skeleton#51
@@ -104,32 +107,17 @@
 # Preserve authored syntax for defaults
 autodoc_preserve_defaults = True

-intersphinx_mapping.update({
-    'pip': ('https://pip.pypa.io/en/latest', None),
-    'build': ('https://build.pypa.io/en/latest', None),
-    'PyPUG': ('https://packaging.python.org/en/latest/', None),
-    'packaging': ('https://packaging.pypa.io/en/latest/', None),
-    'twine': ('https://twine.readthedocs.io/en/stable/', None),
-    'importlib-resources': (
-        'https://importlib-resources.readthedocs.io/en/latest',
-        None,
-    ),
-})
-
-# Add support for linking usernames
-github_url = 'https://github.com'
-github_repo_org = 'pypa'
-github_repo_name = 'setuptools'
-github_repo_slug = f'{github_repo_org}/{github_repo_name}'
-github_repo_url = f'{github_url}/{github_repo_slug}'
-github_sponsors_url = f'{github_url}/sponsors'
+# Add support for linking usernames, PyPI projects, Wikipedia pages
+github_url = 'https://github.com/'
 extlinks = {
-    'user': (f'{github_sponsors_url}/%s', '@%s'),
+    'user': (f'{github_url}%s', '@%s'),
     'pypi': ('https://pypi.org/project/%s', '%s'),
     'wiki': ('https://wikipedia.org/wiki/%s', '%s'),
 }
 extensions += ['sphinx.ext.extlinks']

+# local
+
 # Ref: https://github.com/python-attrs/attrs/pull/571/files\
 #      #diff-85987f48f1258d9ee486e3191495582dR82
 default_role = 'any'
@@ -163,7 +151,7 @@

 # Support for distutils

 # Ref: https://stackoverflow.com/a/30624034/595220
-nitpick_ignore = [
+nitpick_ignore += [
     ('c:func', 'SHGetSpecialFolderPath'),  # ref to MS docs
     ('envvar', 'DIST_EXTRA_CONFIG'),  # undocumented
     ('envvar', 'DISTUTILS_DEBUG'),  # undocumented
@@ -245,3 +233,15 @@
     },  # rel="apple-touch-icon" does not support SVG yet
 ]
+
+intersphinx_mapping.update({
+    'pip': ('https://pip.pypa.io/en/latest', None),
+    'build': ('https://build.pypa.io/en/latest', None),
+    'PyPUG': ('https://packaging.python.org/en/latest/', None),
+    'packaging': ('https://packaging.pypa.io/en/latest/', None),
+    'twine': ('https://twine.readthedocs.io/en/stable/', None),
+    'importlib-resources': (
+        'https://importlib-resources.readthedocs.io/en/latest',
+        None,
+    ),
+})
diff --git a/docs/development/developer-guide.rst b/docs/development/developer-guide.rst
index 4566fef8d0..a9b3ad3bab 100644
--- a/docs/development/developer-guide.rst
+++ b/docs/development/developer-guide.rst
@@ -132,6 +132,29 @@ To refresh the dependencies, run the following command::

     $ tox -e vendor

+
+------------------------------------
+Code conventions and other practices
+------------------------------------
+
+Setuptools utilizes the `skeleton `_
+framework as a foundation for sharing re-usable maintenance tasks
+across different projects in the ecosystem.
+
+This also means that the project adheres to the same coding conventions
+and other practices described in the `skeleton documentation
+`_.
+
+Moreover, changes in the code base should be kept as compatible as possible
+to ``skeleton`` to avoid merge conflicts, or accidental regressions on
+periodical merges.
+
+Finally, the ``setuptools/_distutils`` directory should not be modified
+directly when contributing to the ``setuptools`` project.
+Instead, this directory is maintained as a separate project in
+https://github.com/pypa/distutils, and periodically merged into ``setuptools``.
+
+
 ----------------
 Type annotations
 ----------------
diff --git a/docs/userguide/ext_modules.rst b/docs/userguide/ext_modules.rst
index eabc2c0ab3..19954f50e4 100644
--- a/docs/userguide/ext_modules.rst
+++ b/docs/userguide/ext_modules.rst
@@ -27,22 +27,41 @@ and all project metadata configuration in the ``pyproject.toml`` file:
     version = "0.42"

 To instruct setuptools to compile the ``foo.c`` file into the extension module
-``mylib.foo``, we need to add a ``setup.py`` file similar to the following:
+``mylib.foo``, we need to define an appropriate configuration in either
+``pyproject.toml`` [#pyproject.toml]_ or ``setup.py`` file,
+similar to the following:

-.. code-block:: python
+.. tab:: pyproject.toml

-    from setuptools import Extension, setup
+    .. code-block:: toml

-    setup(
-        ext_modules=[
-            Extension(
-                name="mylib.foo",  # as it would be imported
-                                   # may include packages/namespaces separated by `.`
-
-                sources=["foo.c"],  # all sources are compiled into a single binary file
-            ),
+        [tool.setuptools]
+        ext-modules = [
+          {name = "mylib.foo", sources = ["foo.c"]}
         ]
-    )
+
+.. tab:: setup.py
+
+    .. code-block:: python
+
+        from setuptools import Extension, setup
+
+        setup(
+            ext_modules=[
+                Extension(
+                    name="mylib.foo",
+                    sources=["foo.c"],
+                ),
+            ]
+        )
+
+The ``name`` value corresponds to how the extension module would be
+imported and may include packages/namespaces separated by ``.``.
+The ``sources`` value is a list of all source files that are compiled
+into a single binary file.
+Optionally, any other parameter of :class:`setuptools.Extension` can be defined
+in the configuration file (but in the case of ``pyproject.toml`` it must be
+written using the :wiki:`kebab-case` convention).

 .. seealso::
     You can find more information on the `Python docs about C/C++ extensions`_.
@@ -168,6 +187,16 @@ Extension API Reference

 .. autoclass:: setuptools.Extension

+----
+
+.. rubric:: Notes
+
+.. [#pyproject.toml]
+   Declarative configuration of extension modules via ``pyproject.toml`` was
+   introduced recently and is still considered experimental.
+   Therefore it might change in future versions of ``setuptools``.
+
+
 .. _Python docs about C/C++ extensions: https://docs.python.org/3/extending/extending.html
 .. _Cython: https://cython.readthedocs.io/en/stable/index.html
 .. _directory options: https://gcc.gnu.org/onlinedocs/gcc/Directory-Options.html
diff --git a/docs/userguide/pyproject_config.rst b/docs/userguide/pyproject_config.rst
index 4f60ad9324..e988fec7ac 100644
--- a/docs/userguide/pyproject_config.rst
+++ b/docs/userguide/pyproject_config.rst
@@ -88,6 +88,9 @@ file, and can be set via the ``tool.setuptools`` table:
 ========================= =========================== =========================
 Key                       Value Type (TOML)           Notes
 ========================= =========================== =========================
 ``py-modules``            array                       See tip below.
+``ext-modules``           array of                    **Experimental** - Each item corresponds to a
+                          tables/inline-tables        :class:`setuptools.Extension` object and may define
+                                                      the associated parameters in :wiki:`kebab-case`.
 ``packages``              array or ``find`` directive See tip below.
 ``package-dir``           table/inline-table          Used when explicitly/manually listing ``packages``.
------------------------- --------------------------- -------------------------
diff --git a/mypy.ini b/mypy.ini
index fe6b23844a..6891d0d2d0 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -7,6 +7,11 @@ warn_unused_ignores = True
 warn_redundant_casts = True
 # required to support namespace packages: https://github.com/python/mypy/issues/14057
 explicit_package_bases = True
+
+disable_error_code =
+	# Disable due to many false positives
+	overload-overlap,
+
 exclude = (?x)(
 	# Avoid scanning Python files in generated folders
 	^build/
@@ -19,8 +24,6 @@ exclude = (?x)(
 	# Duplicate module name
 	| ^pkg_resources/tests/data/my-test-package-source/setup.py$
 )
-# Too many false-positives
-disable_error_code = overload-overlap

 # DistributionMetadata.license_files and DistributionMetadata.license_file
 # are dynamically patched in setuptools/_core_metadata.py
diff --git a/newsfragments/+1ac90f4a.feature.rst b/newsfragments/+1ac90f4a.feature.rst
new file mode 100644
index 0000000000..d78e4a7f89
--- /dev/null
+++ b/newsfragments/+1ac90f4a.feature.rst
@@ -0,0 +1 @@
+Declare also the dependencies used by distutils (adds jaraco.collections).
\ No newline at end of file
diff --git a/newsfragments/2971.removal.rst b/newsfragments/2971.removal.rst
new file mode 100644
index 0000000000..940453ab0a
--- /dev/null
+++ b/newsfragments/2971.removal.rst
@@ -0,0 +1 @@
+Removed upload_docs command.
\ No newline at end of file
diff --git a/newsfragments/4553.feature.rst b/newsfragments/4553.feature.rst
deleted file mode 100644
index 43ea1eeac9..0000000000
--- a/newsfragments/4553.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Added detection of ARM64 variant of MSVC -- by :user:`saschanaz`
diff --git a/newsfragments/4585.feature.rst b/newsfragments/4585.feature.rst
deleted file mode 100644
index 566bca75f8..0000000000
--- a/newsfragments/4585.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Made ``setuptools.package_index.Credential`` a `typing.NamedTuple` -- by :user:`Avasam`
diff --git a/newsfragments/4592.misc.rst b/newsfragments/4592.misc.rst
deleted file mode 100644
index 79d36a9d82..0000000000
--- a/newsfragments/4592.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-If somehow the ``EXT_SUFFIX`` configuration variable and ``SETUPTOOLS_EXT_SUFFIX`` environment variables are both missing, ``setuptools.command.build_ext.pyget_ext_filename`` will now raise an `OSError` instead of a `TypeError` -- by :user:`Avasam`
diff --git a/newsfragments/4598.feature.rst b/newsfragments/4598.feature.rst
deleted file mode 100644
index ee2ea40dfe..0000000000
--- a/newsfragments/4598.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fully typed all collection attributes in ``pkg_resources`` -- by :user:`Avasam`
diff --git a/pyproject.toml b/pyproject.toml
index fbcd4b48ef..f623e16445 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ backend-path = ["."]

 [project]
 name = "setuptools"
-version = "73.0.1"
+version = "74.1.2"
 authors = [
 	{ name = "Python Packaging Authority", email = "distutils-sig@python.org" },
 ]
@@ -100,6 +100,12 @@ core = [

 	# pkg_resources
 	"platformdirs >= 2.6.2",
+
+	# for distutils
+	"jaraco.collections",
+	"jaraco.functools",
+	"packaging",
+	"more_itertools",
 ]

 check = [
diff --git a/pyrightconfig.json b/pyrightconfig.json
index cd04c33371..27a329e169 100644
--- a/pyrightconfig.json
+++ b/pyrightconfig.json
@@ -1,12 +1,14 @@
 {
 	"$schema": "https://raw.githubusercontent.com/microsoft/pyright/main/packages/vscode-pyright/schemas/pyrightconfig.schema.json",
 	"exclude": [
+		// Avoid scanning Python files in generated folders
 		"build",
 		".tox",
 		".eggs",
-		"**/_vendor", // Vendored
-		"setuptools/_distutils", // Vendored
-		"setuptools/config/_validate_pyproject/**", // Auto-generated
+		"setuptools/config/_validate_pyproject/**",
+		// These are vendored
+		"**/_vendor",
+		"setuptools/_distutils",
 	],
 	// Our testing setup doesn't allow passing CLI arguments, so local devs have to set this manually.
// "pythonVersion": "3.8", @@ -15,15 +17,13 @@ "typeCheckingMode": "basic", // Too many issues caused by dynamic patching, still worth fixing when we can "reportAttributeAccessIssue": "warning", - // Fails on Python 3.12 due to missing distutils and on cygwin CI tests - "reportAssignmentType": "warning", + // Fails on Python 3.12 due to missing distutils "reportMissingImports": "warning", - "reportOptionalCall": "warning", // FIXME: A handful of reportOperatorIssue spread throughout the codebase "reportOperatorIssue": "warning", // Deferred initialization (initialize_options/finalize_options) causes many "potentially None" issues - // TODO: Fix with type-guards or by changing how it's initialized - "reportArgumentType": "warning", // A lot of these are caused by jaraco.path.build's spec argument not being a Mapping https://github.com/jaraco/jaraco.path/pull/3 + // TODO: Fix with type-guards, by changing how it's initialized, or by casting initial assignments + "reportArgumentType": "warning", "reportCallIssue": "warning", "reportGeneralTypeIssues": "warning", "reportOptionalIterable": "warning", diff --git a/setuptools/__init__.py b/setuptools/__init__.py index ab373c51d6..3a1cc58aa3 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -111,8 +111,8 @@ def _fetch_build_eggs(dist): def setup(**attrs): - # Make sure we have any requirements needed to interpret 'attrs'. logging.configure() + # Make sure we have any requirements needed to interpret 'attrs'. _install_setup_requires(attrs) return distutils.core.setup(**attrs) diff --git a/setuptools/_distutils/_msvccompiler.py b/setuptools/_distutils/_msvccompiler.py index b0322410c5..03653929a8 100644 --- a/setuptools/_distutils/_msvccompiler.py +++ b/setuptools/_distutils/_msvccompiler.py @@ -3,8 +3,7 @@ Contains MSVCCompiler, an implementation of the abstract CCompiler class for Microsoft Visual Studio 2015. -The module is compatible with VS 2015 and later. You can find legacy support -for older versions in distutils.msvc9compiler and distutils.msvccompiler. +This module requires VS 2015 or later. 
""" # Written by Perry Stoll @@ -33,7 +32,7 @@ LibError, LinkError, ) -from .util import get_platform +from .util import get_host_platform, get_platform def _find_vc2015(): @@ -79,32 +78,40 @@ def _find_vc2017(): if not root: return None, None - try: - path = subprocess.check_output( - [ - os.path.join( - root, "Microsoft Visual Studio", "Installer", "vswhere.exe" - ), - "-latest", - "-prerelease", - "-requires", - "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", - "-property", - "installationPath", - "-products", - "*", - ], - encoding="mbcs", - errors="strict", - ).strip() - except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): - return None, None + variant = 'arm64' if get_platform() == 'win-arm64' else 'x86.x64' + suitable_components = ( + f"Microsoft.VisualStudio.Component.VC.Tools.{variant}", + "Microsoft.VisualStudio.Workload.WDExpress", + ) + + for component in suitable_components: + # Workaround for `-requiresAny` (only available on VS 2017 > 15.6) + with contextlib.suppress( + subprocess.CalledProcessError, OSError, UnicodeDecodeError + ): + path = ( + subprocess.check_output([ + os.path.join( + root, "Microsoft Visual Studio", "Installer", "vswhere.exe" + ), + "-latest", + "-prerelease", + "-requires", + component, + "-property", + "installationPath", + "-products", + "*", + ]) + .decode(encoding="mbcs", errors="strict") + .strip() + ) - path = os.path.join(path, "VC", "Auxiliary", "Build") - if os.path.isdir(path): - return 15, path + path = os.path.join(path, "VC", "Auxiliary", "Build") + if os.path.isdir(path): + return 15, path - return None, None + return None, None # no suitable component found PLAT_SPEC_TO_RUNTIME = { @@ -140,7 +147,11 @@ def _get_vc_env(plat_spec): vcvarsall, _ = _find_vcvarsall(plat_spec) if not vcvarsall: - raise DistutilsPlatformError("Unable to find vcvarsall.bat") + raise DistutilsPlatformError( + 'Microsoft Visual C++ 14.0 or greater is required. ' + 'Get it with "Microsoft C++ Build Tools": ' + 'https://visualstudio.microsoft.com/visual-cpp-build-tools/' + ) try: out = subprocess.check_output( @@ -148,7 +159,7 @@ def _get_vc_env(plat_spec): stderr=subprocess.STDOUT, ).decode('utf-16le', errors='replace') except subprocess.CalledProcessError as exc: - log.error(exc.output) + log.error(exc.output) # noqa: RUF100, TRY400 raise DistutilsPlatformError(f"Error executing {exc.cmd}") env = { @@ -178,17 +189,43 @@ def _find_exe(exe, paths=None): return exe -# A map keyed by get_platform() return values to values accepted by -# 'vcvarsall.bat'. Always cross-compile from x86 to work with the -# lighter-weight MSVC installs that do not include native 64-bit tools. -PLAT_TO_VCVARS = { +_vcvars_names = { 'win32': 'x86', - 'win-amd64': 'x86_amd64', - 'win-arm32': 'x86_arm', - 'win-arm64': 'x86_arm64', + 'win-amd64': 'amd64', + 'win-arm32': 'arm', + 'win-arm64': 'arm64', } +def _get_vcvars_spec(host_platform, platform): + """ + Given a host platform and platform, determine the spec for vcvarsall. + + Uses the native MSVC host if the host platform would need expensive + emulation for x86. + + >>> _get_vcvars_spec('win-arm64', 'win32') + 'arm64_x86' + >>> _get_vcvars_spec('win-arm64', 'win-amd64') + 'arm64_amd64' + + Otherwise, always cross-compile from x86 to work with the + lighter-weight MSVC installs that do not include native 64-bit tools. 
+    >>> _get_vcvars_spec('win32', 'win32')
+    'x86'
+    >>> _get_vcvars_spec('win-arm32', 'win-arm32')
+    'x86_arm'
+    >>> _get_vcvars_spec('win-amd64', 'win-arm64')
+    'x86_arm64'
+    """
+    if host_platform != 'win-arm64':
+        host_platform = 'win32'
+    vc_hp = _vcvars_names[host_platform]
+    vc_plat = _vcvars_names[platform]
+    return vc_hp if vc_hp == vc_plat else f'{vc_hp}_{vc_plat}'
+
+
 class MSVCCompiler(CCompiler):
     """Concrete class that implements an interface to Microsoft Visual C++,
     as defined by the CCompiler abstract class."""
@@ -242,13 +279,12 @@ def initialize(self, plat_name=None):
         if plat_name is None:
             plat_name = get_platform()
         # sanity check for platforms to prevent obscure errors later.
-        if plat_name not in PLAT_TO_VCVARS:
+        if plat_name not in _vcvars_names:
             raise DistutilsPlatformError(
-                f"--plat-name must be one of {tuple(PLAT_TO_VCVARS)}"
+                f"--plat-name must be one of {tuple(_vcvars_names)}"
             )

-        # Get the vcvarsall.bat spec for the requested platform.
-        plat_spec = PLAT_TO_VCVARS[plat_name]
+        plat_spec = _get_vcvars_spec(get_host_platform(), get_platform())

         vc_env = _get_vc_env(plat_spec)
         if not vc_env:
diff --git a/setuptools/_distutils/_vendor/__init__.py b/setuptools/_distutils/_vendor/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE
deleted file mode 100644
index 6f62d44e4e..0000000000
--- a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE
+++ /dev/null
@@ -1,3 +0,0 @@
-This software is made available under the terms of *either* of the licenses
-found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made
-under the terms of *both* these licenses.
diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE
deleted file mode 100644
index f433b1a53f..0000000000
--- a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.APACHE
+++ /dev/null
@@ -1,177 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD deleted file mode 100644 index 42ce7b75c9..0000000000 --- a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/LICENSE.BSD +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) Donald Stufft and individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
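Editor's note (illustration, not part of the patch): the deletions above remove the copy of ``packaging`` that distutils used to vendor; the corresponding runtime requirements are instead declared under ``core`` in the ``pyproject.toml`` hunk earlier in this diff. A minimal sanity check, assuming a checkout with this patch applied and its dependencies installed:

```python
# Illustrative check (not part of the patch): after the de-vendoring,
# `packaging` should resolve from the regular environment rather than
# from the removed setuptools/_distutils/_vendor/ tree.
import packaging

assert "_vendor" not in (packaging.__file__ or ""), packaging.__file__
print("packaging resolved from:", packaging.__file__)
```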
diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/METADATA b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/METADATA deleted file mode 100644 index 10ab4390a9..0000000000 --- a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/METADATA +++ /dev/null @@ -1,102 +0,0 @@ -Metadata-Version: 2.1 -Name: packaging -Version: 24.0 -Summary: Core utilities for Python packages -Author-email: Donald Stufft -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: License :: OSI Approved :: BSD License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Typing :: Typed -Project-URL: Documentation, https://packaging.pypa.io/ -Project-URL: Source, https://github.com/pypa/packaging - -packaging -========= - -.. start-intro - -Reusable core utilities for various Python Packaging -`interoperability specifications `_. - -This library provides utilities that implement the interoperability -specifications which have clearly one correct behaviour (eg: :pep:`440`) -or benefit greatly from having a single shared implementation (eg: :pep:`425`). - -.. end-intro - -The ``packaging`` project includes the following: version handling, specifiers, -markers, requirements, tags, utilities. - -Documentation -------------- - -The `documentation`_ provides information and the API for the following: - -- Version Handling -- Specifiers -- Markers -- Requirements -- Tags -- Utilities - -Installation ------------- - -Use ``pip`` to install these utilities:: - - pip install packaging - -The ``packaging`` library uses calendar-based versioning (``YY.N``). - -Discussion ----------- - -If you run into bugs, you can file them in our `issue tracker`_. - -You can also join ``#pypa`` on Freenode to ask questions or get involved. - - -.. _`documentation`: https://packaging.pypa.io/ -.. _`issue tracker`: https://github.com/pypa/packaging/issues - - -Code of Conduct ---------------- - -Everyone interacting in the packaging project's codebases, issue trackers, chat -rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. - -.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md - -Contributing ------------- - -The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as -well as how to report a potential security issue. The documentation for this -project also covers information about `project development`_ and `security`_. - -.. _`project development`: https://packaging.pypa.io/en/latest/development/ -.. _`security`: https://packaging.pypa.io/en/latest/security/ - -Project History ---------------- - -Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for -recent changes and project history. - -.. 
_`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ - diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/RECORD b/setuptools/_distutils/_vendor/packaging-24.0.dist-info/RECORD deleted file mode 100644 index bcf796c2f4..0000000000 --- a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/RECORD +++ /dev/null @@ -1,37 +0,0 @@ -packaging-24.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -packaging-24.0.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 -packaging-24.0.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 -packaging-24.0.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 -packaging-24.0.dist-info/METADATA,sha256=0dESdhY_wHValuOrbgdebiEw04EbX4dkujlxPdEsFus,3203 -packaging-24.0.dist-info/RECORD,, -packaging-24.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -packaging-24.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -packaging/__init__.py,sha256=UzotcV07p8vcJzd80S-W0srhgY8NMVD_XvJcZ7JN-tA,496 -packaging/__pycache__/__init__.cpython-312.pyc,, -packaging/__pycache__/_elffile.cpython-312.pyc,, -packaging/__pycache__/_manylinux.cpython-312.pyc,, -packaging/__pycache__/_musllinux.cpython-312.pyc,, -packaging/__pycache__/_parser.cpython-312.pyc,, -packaging/__pycache__/_structures.cpython-312.pyc,, -packaging/__pycache__/_tokenizer.cpython-312.pyc,, -packaging/__pycache__/markers.cpython-312.pyc,, -packaging/__pycache__/metadata.cpython-312.pyc,, -packaging/__pycache__/requirements.cpython-312.pyc,, -packaging/__pycache__/specifiers.cpython-312.pyc,, -packaging/__pycache__/tags.cpython-312.pyc,, -packaging/__pycache__/utils.cpython-312.pyc,, -packaging/__pycache__/version.cpython-312.pyc,, -packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266 -packaging/_manylinux.py,sha256=1ng_TqyH49hY6s3W_zVHyoJIaogbJqbIF1jJ0fAehc4,9590 -packaging/_musllinux.py,sha256=kgmBGLFybpy8609-KTvzmt2zChCPWYvhp5BWP4JX7dE,2676 -packaging/_parser.py,sha256=zlsFB1FpMRjkUdQb6WLq7xON52ruQadxFpYsDXWhLb4,10347 -packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 -packaging/_tokenizer.py,sha256=alCtbwXhOFAmFGZ6BQ-wCTSFoRAJ2z-ysIf7__MTJ_k,5292 -packaging/markers.py,sha256=eH-txS2zq1HdNpTd9LcZUcVIwewAiNU0grmq5wjKnOk,8208 -packaging/metadata.py,sha256=w7jPEg6mDf1FTZMn79aFxFuk4SKtynUJtxr2InTxlV4,33036 -packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -packaging/requirements.py,sha256=dgoBeVprPu2YE6Q8nGfwOPTjATHbRa_ZGLyXhFEln6Q,2933 -packaging/specifiers.py,sha256=dB2DwbmvSbEuVilEyiIQ382YfW5JfwzXTfRRPVtaENY,39784 -packaging/tags.py,sha256=fedHXiOHkBxNZTXotXv8uXPmMFU9ae-TKBujgYHigcA,18950 -packaging/utils.py,sha256=XgdmP3yx9-wQEFjO7OvMj9RjEf5JlR5HFFR69v7SQ9E,5268 -packaging/version.py,sha256=XjRBLNK17UMDgLeP8UHnqwiY3TdSi03xFQURtec211A,16236 diff --git a/setuptools/_distutils/_vendor/packaging/__init__.py b/setuptools/_distutils/_vendor/packaging/__init__.py deleted file mode 100644 index e7c0aa12ca..0000000000 --- a/setuptools/_distutils/_vendor/packaging/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "24.0" - -__author__ = "Donald Stufft and individual contributors" -__email__ = "donald@stufft.io" - -__license__ = "BSD-2-Clause or Apache-2.0" -__copyright__ = "2014 %s" % __author__ diff --git a/setuptools/_distutils/_vendor/packaging/_elffile.py b/setuptools/_distutils/_vendor/packaging/_elffile.py deleted file mode 100644 index 6fb19b30bb..0000000000 --- a/setuptools/_distutils/_vendor/packaging/_elffile.py +++ /dev/null @@ -1,108 +0,0 @@ -""" -ELF file parser. - -This provides a class ``ELFFile`` that parses an ELF executable in a similar -interface to ``ZipFile``. Only the read interface is implemented. - -Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca -ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html -""" - -import enum -import os -import struct -from typing import IO, Optional, Tuple - - -class ELFInvalid(ValueError): - pass - - -class EIClass(enum.IntEnum): - C32 = 1 - C64 = 2 - - -class EIData(enum.IntEnum): - Lsb = 1 - Msb = 2 - - -class EMachine(enum.IntEnum): - I386 = 3 - S390 = 22 - Arm = 40 - X8664 = 62 - AArc64 = 183 - - -class ELFFile: - """ - Representation of an ELF executable. - """ - - def __init__(self, f: IO[bytes]) -> None: - self._f = f - - try: - ident = self._read("16B") - except struct.error: - raise ELFInvalid("unable to parse identification") - magic = bytes(ident[:4]) - if magic != b"\x7fELF": - raise ELFInvalid(f"invalid magic: {magic!r}") - - self.capacity = ident[4] # Format for program header (bitness). - self.encoding = ident[5] # Data structure encoding (endianness). - - try: - # e_fmt: Format for program header. - # p_fmt: Format for section header. - # p_idx: Indexes to find p_type, p_offset, and p_filesz. - e_fmt, self._p_fmt, self._p_idx = { - (1, 1): ("HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB. - (2, 1): ("HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB. - }[(self.capacity, self.encoding)] - except KeyError: - raise ELFInvalid( - f"unrecognized capacity ({self.capacity}) or " - f"encoding ({self.encoding})" - ) - - try: - ( - _, - self.machine, # Architecture type. - _, - _, - self._e_phoff, # Offset of program header. - _, - self.flags, # Processor-specific flags. - _, - self._e_phentsize, # Size of section. - self._e_phnum, # Number of sections. - ) = self._read(e_fmt) - except struct.error as e: - raise ELFInvalid("unable to parse machine and section information") from e - - def _read(self, fmt: str) -> Tuple[int, ...]: - return struct.unpack(fmt, self._f.read(struct.calcsize(fmt))) - - @property - def interpreter(self) -> Optional[str]: - """ - The path recorded in the ``PT_INTERP`` section header. - """ - for index in range(self._e_phnum): - self._f.seek(self._e_phoff + self._e_phentsize * index) - try: - data = self._read(self._p_fmt) - except struct.error: - continue - if data[self._p_idx[0]] != 3: # Not PT_INTERP. 
- continue - self._f.seek(data[self._p_idx[1]]) - return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0") - return None diff --git a/setuptools/_distutils/_vendor/packaging/_manylinux.py b/setuptools/_distutils/_vendor/packaging/_manylinux.py deleted file mode 100644 index ad62505f3f..0000000000 --- a/setuptools/_distutils/_vendor/packaging/_manylinux.py +++ /dev/null @@ -1,260 +0,0 @@ -import collections -import contextlib -import functools -import os -import re -import sys -import warnings -from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple - -from ._elffile import EIClass, EIData, ELFFile, EMachine - -EF_ARM_ABIMASK = 0xFF000000 -EF_ARM_ABI_VER5 = 0x05000000 -EF_ARM_ABI_FLOAT_HARD = 0x00000400 - - -# `os.PathLike` not a generic type until Python 3.9, so sticking with `str` -# as the type for `path` until then. -@contextlib.contextmanager -def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]: - try: - with open(path, "rb") as f: - yield ELFFile(f) - except (OSError, TypeError, ValueError): - yield None - - -def _is_linux_armhf(executable: str) -> bool: - # hard-float ABI can be detected from the ELF header of the running - # process - # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf - with _parse_elf(executable) as f: - return ( - f is not None - and f.capacity == EIClass.C32 - and f.encoding == EIData.Lsb - and f.machine == EMachine.Arm - and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5 - and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD - ) - - -def _is_linux_i686(executable: str) -> bool: - with _parse_elf(executable) as f: - return ( - f is not None - and f.capacity == EIClass.C32 - and f.encoding == EIData.Lsb - and f.machine == EMachine.I386 - ) - - -def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: - if "armv7l" in archs: - return _is_linux_armhf(executable) - if "i686" in archs: - return _is_linux_i686(executable) - allowed_archs = { - "x86_64", - "aarch64", - "ppc64", - "ppc64le", - "s390x", - "loongarch64", - "riscv64", - } - return any(arch in allowed_archs for arch in archs) - - -# If glibc ever changes its major version, we need to know what the last -# minor version was, so we can build the complete list of all versions. -# For now, guess what the highest minor version might be, assume it will -# be 50 for testing. Once this actually happens, update the dictionary -# with the actual value. -_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) - - -class _GLibCVersion(NamedTuple): - major: int - minor: int - - -def _glibc_version_string_confstr() -> Optional[str]: - """ - Primary implementation of glibc_version_string using os.confstr. - """ - # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely - # to be broken or missing. This strategy is used in the standard library - # platform module. - # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 - try: - # Should be a string like "glibc 2.17". - version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION") - assert version_string is not None - _, version = version_string.rsplit() - except (AssertionError, AttributeError, OSError, ValueError): - # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... - return None - return version - - -def _glibc_version_string_ctypes() -> Optional[str]: - """ - Fallback implementation of glibc_version_string using ctypes. 
- """ - try: - import ctypes - except ImportError: - return None - - # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen - # manpage says, "If filename is NULL, then the returned handle is for the - # main program". This way we can let the linker do the work to figure out - # which libc our process is actually using. - # - # We must also handle the special case where the executable is not a - # dynamically linked executable. This can occur when using musl libc, - # for example. In this situation, dlopen() will error, leading to an - # OSError. Interestingly, at least in the case of musl, there is no - # errno set on the OSError. The single string argument used to construct - # OSError comes from libc itself and is therefore not portable to - # hard code here. In any case, failure to call dlopen() means we - # can proceed, so we bail on our attempt. - try: - process_namespace = ctypes.CDLL(None) - except OSError: - return None - - try: - gnu_get_libc_version = process_namespace.gnu_get_libc_version - except AttributeError: - # Symbol doesn't exist -> therefore, we are not linked to - # glibc. - return None - - # Call gnu_get_libc_version, which returns a string like "2.5" - gnu_get_libc_version.restype = ctypes.c_char_p - version_str: str = gnu_get_libc_version() - # py2 / py3 compatibility: - if not isinstance(version_str, str): - version_str = version_str.decode("ascii") - - return version_str - - -def _glibc_version_string() -> Optional[str]: - """Returns glibc version string, or None if not using glibc.""" - return _glibc_version_string_confstr() or _glibc_version_string_ctypes() - - -def _parse_glibc_version(version_str: str) -> Tuple[int, int]: - """Parse glibc version. - - We use a regexp instead of str.split because we want to discard any - random junk that might come after the minor version -- this might happen - in patched/forked versions of glibc (e.g. Linaro's version of glibc - uses version strings like "2.20-2014.11"). See gh-3588. - """ - m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) - if not m: - warnings.warn( - f"Expected glibc version with 2 components major.minor," - f" got: {version_str}", - RuntimeWarning, - ) - return -1, -1 - return int(m.group("major")), int(m.group("minor")) - - -@functools.lru_cache() -def _get_glibc_version() -> Tuple[int, int]: - version_str = _glibc_version_string() - if version_str is None: - return (-1, -1) - return _parse_glibc_version(version_str) - - -# From PEP 513, PEP 600 -def _is_compatible(arch: str, version: _GLibCVersion) -> bool: - sys_glibc = _get_glibc_version() - if sys_glibc < version: - return False - # Check for presence of _manylinux module. 
- try: - import _manylinux - except ImportError: - return True - if hasattr(_manylinux, "manylinux_compatible"): - result = _manylinux.manylinux_compatible(version[0], version[1], arch) - if result is not None: - return bool(result) - return True - if version == _GLibCVersion(2, 5): - if hasattr(_manylinux, "manylinux1_compatible"): - return bool(_manylinux.manylinux1_compatible) - if version == _GLibCVersion(2, 12): - if hasattr(_manylinux, "manylinux2010_compatible"): - return bool(_manylinux.manylinux2010_compatible) - if version == _GLibCVersion(2, 17): - if hasattr(_manylinux, "manylinux2014_compatible"): - return bool(_manylinux.manylinux2014_compatible) - return True - - -_LEGACY_MANYLINUX_MAP = { - # CentOS 7 w/ glibc 2.17 (PEP 599) - (2, 17): "manylinux2014", - # CentOS 6 w/ glibc 2.12 (PEP 571) - (2, 12): "manylinux2010", - # CentOS 5 w/ glibc 2.5 (PEP 513) - (2, 5): "manylinux1", -} - - -def platform_tags(archs: Sequence[str]) -> Iterator[str]: - """Generate manylinux tags compatible to the current platform. - - :param archs: Sequence of compatible architectures. - The first one shall be the closest to the actual architecture and be the part of - platform tag after the ``linux_`` prefix, e.g. ``x86_64``. - The ``linux_`` prefix is assumed as a prerequisite for the current platform to - be manylinux-compatible. - - :returns: An iterator of compatible manylinux tags. - """ - if not _have_compatible_abi(sys.executable, archs): - return - # Oldest glibc to be supported regardless of architecture is (2, 17). - too_old_glibc2 = _GLibCVersion(2, 16) - if set(archs) & {"x86_64", "i686"}: - # On x86/i686 also oldest glibc to be supported is (2, 5). - too_old_glibc2 = _GLibCVersion(2, 4) - current_glibc = _GLibCVersion(*_get_glibc_version()) - glibc_max_list = [current_glibc] - # We can assume compatibility across glibc major versions. - # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 - # - # Build a list of maximum glibc versions so that we can - # output the canonical list of all glibc from current_glibc - # down to too_old_glibc2, including all intermediary versions. - for glibc_major in range(current_glibc.major - 1, 1, -1): - glibc_minor = _LAST_GLIBC_MINOR[glibc_major] - glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) - for arch in archs: - for glibc_max in glibc_max_list: - if glibc_max.major == too_old_glibc2.major: - min_minor = too_old_glibc2.minor - else: - # For other glibc major versions oldest supported is (x, 0). - min_minor = -1 - for glibc_minor in range(glibc_max.minor, min_minor, -1): - glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) - tag = "manylinux_{}_{}".format(*glibc_version) - if _is_compatible(arch, glibc_version): - yield f"{tag}_{arch}" - # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. - if glibc_version in _LEGACY_MANYLINUX_MAP: - legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] - if _is_compatible(arch, glibc_version): - yield f"{legacy_tag}_{arch}" diff --git a/setuptools/_distutils/_vendor/packaging/_musllinux.py b/setuptools/_distutils/_vendor/packaging/_musllinux.py deleted file mode 100644 index 86419df9d7..0000000000 --- a/setuptools/_distutils/_vendor/packaging/_musllinux.py +++ /dev/null @@ -1,83 +0,0 @@ -"""PEP 656 support. - -This module implements logic to detect if the currently running Python is -linked against musl, and what musl version is used. 
-""" - -import functools -import re -import subprocess -import sys -from typing import Iterator, NamedTuple, Optional, Sequence - -from ._elffile import ELFFile - - -class _MuslVersion(NamedTuple): - major: int - minor: int - - -def _parse_musl_version(output: str) -> Optional[_MuslVersion]: - lines = [n for n in (n.strip() for n in output.splitlines()) if n] - if len(lines) < 2 or lines[0][:4] != "musl": - return None - m = re.match(r"Version (\d+)\.(\d+)", lines[1]) - if not m: - return None - return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) - - -@functools.lru_cache() -def _get_musl_version(executable: str) -> Optional[_MuslVersion]: - """Detect currently-running musl runtime version. - - This is done by checking the specified executable's dynamic linking - information, and invoking the loader to parse its output for a version - string. If the loader is musl, the output would be something like:: - - musl libc (x86_64) - Version 1.2.2 - Dynamic Program Loader - """ - try: - with open(executable, "rb") as f: - ld = ELFFile(f).interpreter - except (OSError, TypeError, ValueError): - return None - if ld is None or "musl" not in ld: - return None - proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True) - return _parse_musl_version(proc.stderr) - - -def platform_tags(archs: Sequence[str]) -> Iterator[str]: - """Generate musllinux tags compatible to the current platform. - - :param archs: Sequence of compatible architectures. - The first one shall be the closest to the actual architecture and be the part of - platform tag after the ``linux_`` prefix, e.g. ``x86_64``. - The ``linux_`` prefix is assumed as a prerequisite for the current platform to - be musllinux-compatible. - - :returns: An iterator of compatible musllinux tags. - """ - sys_musl = _get_musl_version(sys.executable) - if sys_musl is None: # Python not dynamically linked against musl. - return - for arch in archs: - for minor in range(sys_musl.minor, -1, -1): - yield f"musllinux_{sys_musl.major}_{minor}_{arch}" - - -if __name__ == "__main__": # pragma: no cover - import sysconfig - - plat = sysconfig.get_platform() - assert plat.startswith("linux-"), "not linux" - - print("plat:", plat) - print("musl:", _get_musl_version(sys.executable)) - print("tags:", end=" ") - for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): - print(t, end="\n ") diff --git a/setuptools/_distutils/_vendor/packaging/_parser.py b/setuptools/_distutils/_vendor/packaging/_parser.py deleted file mode 100644 index 684df75457..0000000000 --- a/setuptools/_distutils/_vendor/packaging/_parser.py +++ /dev/null @@ -1,356 +0,0 @@ -"""Handwritten parser of dependency specifiers. - -The docstring for each __parse_* function contains ENBF-inspired grammar representing -the implementation. 
-""" - -import ast -from typing import Any, List, NamedTuple, Optional, Tuple, Union - -from ._tokenizer import DEFAULT_RULES, Tokenizer - - -class Node: - def __init__(self, value: str) -> None: - self.value = value - - def __str__(self) -> str: - return self.value - - def __repr__(self) -> str: - return f"<{self.__class__.__name__}('{self}')>" - - def serialize(self) -> str: - raise NotImplementedError - - -class Variable(Node): - def serialize(self) -> str: - return str(self) - - -class Value(Node): - def serialize(self) -> str: - return f'"{self}"' - - -class Op(Node): - def serialize(self) -> str: - return str(self) - - -MarkerVar = Union[Variable, Value] -MarkerItem = Tuple[MarkerVar, Op, MarkerVar] -# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]] -# MarkerList = List[Union["MarkerList", MarkerAtom, str]] -# mypy does not support recursive type definition -# https://github.com/python/mypy/issues/731 -MarkerAtom = Any -MarkerList = List[Any] - - -class ParsedRequirement(NamedTuple): - name: str - url: str - extras: List[str] - specifier: str - marker: Optional[MarkerList] - - -# -------------------------------------------------------------------------------------- -# Recursive descent parser for dependency specifier -# -------------------------------------------------------------------------------------- -def parse_requirement(source: str) -> ParsedRequirement: - return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) - - -def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: - """ - requirement = WS? IDENTIFIER WS? extras WS? requirement_details - """ - tokenizer.consume("WS") - - name_token = tokenizer.expect( - "IDENTIFIER", expected="package name at the start of dependency specifier" - ) - name = name_token.text - tokenizer.consume("WS") - - extras = _parse_extras(tokenizer) - tokenizer.consume("WS") - - url, specifier, marker = _parse_requirement_details(tokenizer) - tokenizer.expect("END", expected="end of dependency specifier") - - return ParsedRequirement(name, url, extras, specifier, marker) - - -def _parse_requirement_details( - tokenizer: Tokenizer, -) -> Tuple[str, str, Optional[MarkerList]]: - """ - requirement_details = AT URL (WS requirement_marker?)? - | specifier WS? (requirement_marker)? - """ - - specifier = "" - url = "" - marker = None - - if tokenizer.check("AT"): - tokenizer.read() - tokenizer.consume("WS") - - url_start = tokenizer.position - url = tokenizer.expect("URL", expected="URL after @").text - if tokenizer.check("END", peek=True): - return (url, specifier, marker) - - tokenizer.expect("WS", expected="whitespace after URL") - - # The input might end after whitespace. - if tokenizer.check("END", peek=True): - return (url, specifier, marker) - - marker = _parse_requirement_marker( - tokenizer, span_start=url_start, after="URL and whitespace" - ) - else: - specifier_start = tokenizer.position - specifier = _parse_specifier(tokenizer) - tokenizer.consume("WS") - - if tokenizer.check("END", peek=True): - return (url, specifier, marker) - - marker = _parse_requirement_marker( - tokenizer, - span_start=specifier_start, - after=( - "version specifier" - if specifier - else "name and no valid version specifier" - ), - ) - - return (url, specifier, marker) - - -def _parse_requirement_marker( - tokenizer: Tokenizer, *, span_start: int, after: str -) -> MarkerList: - """ - requirement_marker = SEMICOLON marker WS? 
- """ - - if not tokenizer.check("SEMICOLON"): - tokenizer.raise_syntax_error( - f"Expected end or semicolon (after {after})", - span_start=span_start, - ) - tokenizer.read() - - marker = _parse_marker(tokenizer) - tokenizer.consume("WS") - - return marker - - -def _parse_extras(tokenizer: Tokenizer) -> List[str]: - """ - extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? - """ - if not tokenizer.check("LEFT_BRACKET", peek=True): - return [] - - with tokenizer.enclosing_tokens( - "LEFT_BRACKET", - "RIGHT_BRACKET", - around="extras", - ): - tokenizer.consume("WS") - extras = _parse_extras_list(tokenizer) - tokenizer.consume("WS") - - return extras - - -def _parse_extras_list(tokenizer: Tokenizer) -> List[str]: - """ - extras_list = identifier (wsp* ',' wsp* identifier)* - """ - extras: List[str] = [] - - if not tokenizer.check("IDENTIFIER"): - return extras - - extras.append(tokenizer.read().text) - - while True: - tokenizer.consume("WS") - if tokenizer.check("IDENTIFIER", peek=True): - tokenizer.raise_syntax_error("Expected comma between extra names") - elif not tokenizer.check("COMMA"): - break - - tokenizer.read() - tokenizer.consume("WS") - - extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") - extras.append(extra_token.text) - - return extras - - -def _parse_specifier(tokenizer: Tokenizer) -> str: - """ - specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS - | WS? version_many WS? - """ - with tokenizer.enclosing_tokens( - "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", - around="version specifier", - ): - tokenizer.consume("WS") - parsed_specifiers = _parse_version_many(tokenizer) - tokenizer.consume("WS") - - return parsed_specifiers - - -def _parse_version_many(tokenizer: Tokenizer) -> str: - """ - version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? - """ - parsed_specifiers = "" - while tokenizer.check("SPECIFIER"): - span_start = tokenizer.position - parsed_specifiers += tokenizer.read().text - if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True): - tokenizer.raise_syntax_error( - ".* suffix can only be used with `==` or `!=` operators", - span_start=span_start, - span_end=tokenizer.position + 1, - ) - if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True): - tokenizer.raise_syntax_error( - "Local version label can only be used with `==` or `!=` operators", - span_start=span_start, - span_end=tokenizer.position, - ) - tokenizer.consume("WS") - if not tokenizer.check("COMMA"): - break - parsed_specifiers += tokenizer.read().text - tokenizer.consume("WS") - - return parsed_specifiers - - -# -------------------------------------------------------------------------------------- -# Recursive descent parser for marker expression -# -------------------------------------------------------------------------------------- -def parse_marker(source: str) -> MarkerList: - return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES)) - - -def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList: - retval = _parse_marker(tokenizer) - tokenizer.expect("END", expected="end of marker expression") - return retval - - -def _parse_marker(tokenizer: Tokenizer) -> MarkerList: - """ - marker = marker_atom (BOOLOP marker_atom)+ - """ - expression = [_parse_marker_atom(tokenizer)] - while tokenizer.check("BOOLOP"): - token = tokenizer.read() - expr_right = _parse_marker_atom(tokenizer) - expression.extend((token.text, expr_right)) - return expression - - -def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: - """ - marker_atom = WS? 
LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? - | WS? marker_item WS? - """ - - tokenizer.consume("WS") - if tokenizer.check("LEFT_PARENTHESIS", peek=True): - with tokenizer.enclosing_tokens( - "LEFT_PARENTHESIS", - "RIGHT_PARENTHESIS", - around="marker expression", - ): - tokenizer.consume("WS") - marker: MarkerAtom = _parse_marker(tokenizer) - tokenizer.consume("WS") - else: - marker = _parse_marker_item(tokenizer) - tokenizer.consume("WS") - return marker - - -def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: - """ - marker_item = WS? marker_var WS? marker_op WS? marker_var WS? - """ - tokenizer.consume("WS") - marker_var_left = _parse_marker_var(tokenizer) - tokenizer.consume("WS") - marker_op = _parse_marker_op(tokenizer) - tokenizer.consume("WS") - marker_var_right = _parse_marker_var(tokenizer) - tokenizer.consume("WS") - return (marker_var_left, marker_op, marker_var_right) - - -def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: - """ - marker_var = VARIABLE | QUOTED_STRING - """ - if tokenizer.check("VARIABLE"): - return process_env_var(tokenizer.read().text.replace(".", "_")) - elif tokenizer.check("QUOTED_STRING"): - return process_python_str(tokenizer.read().text) - else: - tokenizer.raise_syntax_error( - message="Expected a marker variable or quoted string" - ) - - -def process_env_var(env_var: str) -> Variable: - if env_var in ("platform_python_implementation", "python_implementation"): - return Variable("platform_python_implementation") - else: - return Variable(env_var) - - -def process_python_str(python_str: str) -> Value: - value = ast.literal_eval(python_str) - return Value(str(value)) - - -def _parse_marker_op(tokenizer: Tokenizer) -> Op: - """ - marker_op = IN | NOT IN | OP - """ - if tokenizer.check("IN"): - tokenizer.read() - return Op("in") - elif tokenizer.check("NOT"): - tokenizer.read() - tokenizer.expect("WS", expected="whitespace after 'not'") - tokenizer.expect("IN", expected="'in' after 'not'") - return Op("not in") - elif tokenizer.check("OP"): - return Op(tokenizer.read().text) - else: - return tokenizer.raise_syntax_error( - "Expected marker operator, one of " - "<=, <, !=, ==, >=, >, ~=, ===, in, not in" - ) diff --git a/setuptools/_distutils/_vendor/packaging/_structures.py b/setuptools/_distutils/_vendor/packaging/_structures.py deleted file mode 100644 index 90a6465f96..0000000000 --- a/setuptools/_distutils/_vendor/packaging/_structures.py +++ /dev/null @@ -1,61 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
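The recursive-descent parser above is what decomposes a PEP 508 string into name, extras, specifier, and marker parts. A short sketch against the public ``packaging`` API, which wraps the same parser (assuming ``packaging`` is installed):

from packaging.requirements import Requirement

req = Requirement('requests[security]>=2.8.1,==2.8.*; python_version < "3.13"')
print(req.name)       # requests
print(req.extras)     # {'security'}
print(req.specifier)  # ==2.8.*,>=2.8.1
print(req.marker)     # python_version < "3.13"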
- - -class InfinityType: - def __repr__(self) -> str: - return "Infinity" - - def __hash__(self) -> int: - return hash(repr(self)) - - def __lt__(self, other: object) -> bool: - return False - - def __le__(self, other: object) -> bool: - return False - - def __eq__(self, other: object) -> bool: - return isinstance(other, self.__class__) - - def __gt__(self, other: object) -> bool: - return True - - def __ge__(self, other: object) -> bool: - return True - - def __neg__(self: object) -> "NegativeInfinityType": - return NegativeInfinity - - -Infinity = InfinityType() - - -class NegativeInfinityType: - def __repr__(self) -> str: - return "-Infinity" - - def __hash__(self) -> int: - return hash(repr(self)) - - def __lt__(self, other: object) -> bool: - return True - - def __le__(self, other: object) -> bool: - return True - - def __eq__(self, other: object) -> bool: - return isinstance(other, self.__class__) - - def __gt__(self, other: object) -> bool: - return False - - def __ge__(self, other: object) -> bool: - return False - - def __neg__(self: object) -> InfinityType: - return Infinity - - -NegativeInfinity = NegativeInfinityType() diff --git a/setuptools/_distutils/_vendor/packaging/_tokenizer.py b/setuptools/_distutils/_vendor/packaging/_tokenizer.py deleted file mode 100644 index dd0d648d49..0000000000 --- a/setuptools/_distutils/_vendor/packaging/_tokenizer.py +++ /dev/null @@ -1,192 +0,0 @@ -import contextlib -import re -from dataclasses import dataclass -from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union - -from .specifiers import Specifier - - -@dataclass -class Token: - name: str - text: str - position: int - - -class ParserSyntaxError(Exception): - """The provided source text could not be parsed correctly.""" - - def __init__( - self, - message: str, - *, - source: str, - span: Tuple[int, int], - ) -> None: - self.span = span - self.message = message - self.source = source - - super().__init__() - - def __str__(self) -> str: - marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" - return "\n ".join([self.message, self.source, marker]) - - -DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = { - "LEFT_PARENTHESIS": r"\(", - "RIGHT_PARENTHESIS": r"\)", - "LEFT_BRACKET": r"\[", - "RIGHT_BRACKET": r"\]", - "SEMICOLON": r";", - "COMMA": r",", - "QUOTED_STRING": re.compile( - r""" - ( - ('[^']*') - | - ("[^"]*") - ) - """, - re.VERBOSE, - ), - "OP": r"(===|==|~=|!=|<=|>=|<|>)", - "BOOLOP": r"\b(or|and)\b", - "IN": r"\bin\b", - "NOT": r"\bnot\b", - "VARIABLE": re.compile( - r""" - \b( - python_version - |python_full_version - |os[._]name - |sys[._]platform - |platform_(release|system) - |platform[._](version|machine|python_implementation) - |python_implementation - |implementation_(name|version) - |extra - )\b - """, - re.VERBOSE, - ), - "SPECIFIER": re.compile( - Specifier._operator_regex_str + Specifier._version_regex_str, - re.VERBOSE | re.IGNORECASE, - ), - "AT": r"\@", - "URL": r"[^ \t]+", - "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b", - "VERSION_PREFIX_TRAIL": r"\.\*", - "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*", - "WS": r"[ \t]+", - "END": r"$", -} - - -class Tokenizer: - """Context-sensitive token parsing. - - Provides methods to examine the input stream to check whether the next token - matches. 
- """ - - def __init__( - self, - source: str, - *, - rules: "Dict[str, Union[str, re.Pattern[str]]]", - ) -> None: - self.source = source - self.rules: Dict[str, re.Pattern[str]] = { - name: re.compile(pattern) for name, pattern in rules.items() - } - self.next_token: Optional[Token] = None - self.position = 0 - - def consume(self, name: str) -> None: - """Move beyond provided token name, if at current position.""" - if self.check(name): - self.read() - - def check(self, name: str, *, peek: bool = False) -> bool: - """Check whether the next token has the provided name. - - By default, if the check succeeds, the token *must* be read before - another check. If `peek` is set to `True`, the token is not loaded and - would need to be checked again. - """ - assert ( - self.next_token is None - ), f"Cannot check for {name!r}, already have {self.next_token!r}" - assert name in self.rules, f"Unknown token name: {name!r}" - - expression = self.rules[name] - - match = expression.match(self.source, self.position) - if match is None: - return False - if not peek: - self.next_token = Token(name, match[0], self.position) - return True - - def expect(self, name: str, *, expected: str) -> Token: - """Expect a certain token name next, failing with a syntax error otherwise. - - The token is *not* read. - """ - if not self.check(name): - raise self.raise_syntax_error(f"Expected {expected}") - return self.read() - - def read(self) -> Token: - """Consume the next token and return it.""" - token = self.next_token - assert token is not None - - self.position += len(token.text) - self.next_token = None - - return token - - def raise_syntax_error( - self, - message: str, - *, - span_start: Optional[int] = None, - span_end: Optional[int] = None, - ) -> NoReturn: - """Raise ParserSyntaxError at the given position.""" - span = ( - self.position if span_start is None else span_start, - self.position if span_end is None else span_end, - ) - raise ParserSyntaxError( - message, - source=self.source, - span=span, - ) - - @contextlib.contextmanager - def enclosing_tokens( - self, open_token: str, close_token: str, *, around: str - ) -> Iterator[None]: - if self.check(open_token): - open_position = self.position - self.read() - else: - open_position = None - - yield - - if open_position is None: - return - - if not self.check(close_token): - self.raise_syntax_error( - f"Expected matching {close_token} for {open_token}, after {around}", - span_start=open_position, - ) - - self.read() diff --git a/setuptools/_distutils/_vendor/packaging/markers.py b/setuptools/_distutils/_vendor/packaging/markers.py deleted file mode 100644 index 8b98fca723..0000000000 --- a/setuptools/_distutils/_vendor/packaging/markers.py +++ /dev/null @@ -1,252 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -import operator -import os -import platform -import sys -from typing import Any, Callable, Dict, List, Optional, Tuple, Union - -from ._parser import ( - MarkerAtom, - MarkerList, - Op, - Value, - Variable, - parse_marker as _parse_marker, -) -from ._tokenizer import ParserSyntaxError -from .specifiers import InvalidSpecifier, Specifier -from .utils import canonicalize_name - -__all__ = [ - "InvalidMarker", - "UndefinedComparison", - "UndefinedEnvironmentName", - "Marker", - "default_environment", -] - -Operator = Callable[[str, str], bool] - - -class InvalidMarker(ValueError): - """ - An invalid marker was found, users should refer to PEP 508. - """ - - -class UndefinedComparison(ValueError): - """ - An invalid operation was attempted on a value that doesn't support it. - """ - - -class UndefinedEnvironmentName(ValueError): - """ - A name was attempted to be used that does not exist inside of the - environment. - """ - - -def _normalize_extra_values(results: Any) -> Any: - """ - Normalize extra values. - """ - if isinstance(results[0], tuple): - lhs, op, rhs = results[0] - if isinstance(lhs, Variable) and lhs.value == "extra": - normalized_extra = canonicalize_name(rhs.value) - rhs = Value(normalized_extra) - elif isinstance(rhs, Variable) and rhs.value == "extra": - normalized_extra = canonicalize_name(lhs.value) - lhs = Value(normalized_extra) - results[0] = lhs, op, rhs - return results - - -def _format_marker( - marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True -) -> str: - - assert isinstance(marker, (list, tuple, str)) - - # Sometimes we have a structure like [[...]] which is a single item list - # where the single item is itself it's own list. In that case we want skip - # the rest of this function so that we don't get extraneous () on the - # outside. 
- if ( - isinstance(marker, list) - and len(marker) == 1 - and isinstance(marker[0], (list, tuple)) - ): - return _format_marker(marker[0]) - - if isinstance(marker, list): - inner = (_format_marker(m, first=False) for m in marker) - if first: - return " ".join(inner) - else: - return "(" + " ".join(inner) + ")" - elif isinstance(marker, tuple): - return " ".join([m.serialize() for m in marker]) - else: - return marker - - -_operators: Dict[str, Operator] = { - "in": lambda lhs, rhs: lhs in rhs, - "not in": lambda lhs, rhs: lhs not in rhs, - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, - "!=": operator.ne, - ">=": operator.ge, - ">": operator.gt, -} - - -def _eval_op(lhs: str, op: Op, rhs: str) -> bool: - try: - spec = Specifier("".join([op.serialize(), rhs])) - except InvalidSpecifier: - pass - else: - return spec.contains(lhs, prereleases=True) - - oper: Optional[Operator] = _operators.get(op.serialize()) - if oper is None: - raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") - - return oper(lhs, rhs) - - -def _normalize(*values: str, key: str) -> Tuple[str, ...]: - # PEP 685 – Comparison of extra names for optional distribution dependencies - # https://peps.python.org/pep-0685/ - # > When comparing extra names, tools MUST normalize the names being - # > compared using the semantics outlined in PEP 503 for names - if key == "extra": - return tuple(canonicalize_name(v) for v in values) - - # other environment markers don't have such standards - return values - - -def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool: - groups: List[List[bool]] = [[]] - - for marker in markers: - assert isinstance(marker, (list, tuple, str)) - - if isinstance(marker, list): - groups[-1].append(_evaluate_markers(marker, environment)) - elif isinstance(marker, tuple): - lhs, op, rhs = marker - - if isinstance(lhs, Variable): - environment_key = lhs.value - lhs_value = environment[environment_key] - rhs_value = rhs.value - else: - lhs_value = lhs.value - environment_key = rhs.value - rhs_value = environment[environment_key] - - lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key) - groups[-1].append(_eval_op(lhs_value, op, rhs_value)) - else: - assert marker in ["and", "or"] - if marker == "or": - groups.append([]) - - return any(all(item) for item in groups) - - -def format_full_version(info: "sys._version_info") -> str: - version = "{0.major}.{0.minor}.{0.micro}".format(info) - kind = info.releaselevel - if kind != "final": - version += kind[0] + str(info.serial) - return version - - -def default_environment() -> Dict[str, str]: - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name - return { - "implementation_name": implementation_name, - "implementation_version": iver, - "os_name": os.name, - "platform_machine": platform.machine(), - "platform_release": platform.release(), - "platform_system": platform.system(), - "platform_version": platform.version(), - "python_full_version": platform.python_version(), - "platform_python_implementation": platform.python_implementation(), - "python_version": ".".join(platform.python_version_tuple()[:2]), - "sys_platform": sys.platform, - } - - -class Marker: - def __init__(self, marker: str) -> None: - # Note: We create a Marker object without calling this constructor in - # packaging.requirements.Requirement. If any additional logic is - # added here, make sure to mirror/adapt Requirement. 
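For reference, the ``default_environment()``/``_evaluate_markers()`` pair above is what the public ``Marker.evaluate()`` drives; a sketch (assuming ``packaging`` is installed):

from packaging.markers import Marker

m = Marker('python_version >= "3.8" and os_name == "posix"')
print(m.evaluate())                   # evaluated against the running interpreter
print(m.evaluate({"os_name": "nt"}))  # an override merged over the defaults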
-        try:
-            self._markers = _normalize_extra_values(_parse_marker(marker))
-            # The attribute `_markers` can be described in terms of a recursive type:
-            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
-            #
-            # For example, the following expression:
-            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
-            #
-            # is parsed into:
-            # [
-            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
-            #     'and',
-            #     [
-            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
-            #         'or',
-            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
-            #     ]
-            # ]
-        except ParserSyntaxError as e:
-            raise InvalidMarker(str(e)) from e
-
-    def __str__(self) -> str:
-        return _format_marker(self._markers)
-
-    def __repr__(self) -> str:
-        return f"<Marker('{self}')>"
-
-    def __hash__(self) -> int:
-        return hash((self.__class__.__name__, str(self)))
-
-    def __eq__(self, other: Any) -> bool:
-        if not isinstance(other, Marker):
-            return NotImplemented
-
-        return str(self) == str(other)
-
-    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
-        """Evaluate a marker.
-
-        Return the boolean from evaluating the given marker against the
-        environment. environment is an optional argument to override all or
-        part of the determined environment.
-
-        The environment is determined from the current Python process.
-        """
-        current_environment = default_environment()
-        current_environment["extra"] = ""
-        if environment is not None:
-            current_environment.update(environment)
-            # The API used to allow setting extra to None. We need to handle this
-            # case for backwards compatibility.
-            if current_environment["extra"] is None:
-                current_environment["extra"] = ""
-
-        return _evaluate_markers(self._markers, current_environment)
diff --git a/setuptools/_distutils/_vendor/packaging/metadata.py b/setuptools/_distutils/_vendor/packaging/metadata.py
deleted file mode 100644
index fb27493079..0000000000
--- a/setuptools/_distutils/_vendor/packaging/metadata.py
+++ /dev/null
@@ -1,825 +0,0 @@
-import email.feedparser
-import email.header
-import email.message
-import email.parser
-import email.policy
-import sys
-import typing
-from typing import (
-    Any,
-    Callable,
-    Dict,
-    Generic,
-    List,
-    Optional,
-    Tuple,
-    Type,
-    Union,
-    cast,
-)
-
-from . import requirements, specifiers, utils, version as version_module
-
-T = typing.TypeVar("T")
-if sys.version_info[:2] >= (3, 8):  # pragma: no cover
-    from typing import Literal, TypedDict
-else:  # pragma: no cover
-    if typing.TYPE_CHECKING:
-        from typing_extensions import Literal, TypedDict
-    else:
-        try:
-            from typing_extensions import Literal, TypedDict
-        except ImportError:
-
-            class Literal:
-                def __init_subclass__(*_args, **_kwargs):
-                    pass
-
-            class TypedDict:
-                def __init_subclass__(*_args, **_kwargs):
-                    pass
-
-
-try:
-    ExceptionGroup
-except NameError:  # pragma: no cover
-
-    class ExceptionGroup(Exception):  # noqa: N818
-        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
-
-        If :external:exc:`ExceptionGroup` is already defined by Python itself,
-        that version is used instead.
- """ - - message: str - exceptions: List[Exception] - - def __init__(self, message: str, exceptions: List[Exception]) -> None: - self.message = message - self.exceptions = exceptions - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})" - -else: # pragma: no cover - ExceptionGroup = ExceptionGroup - - -class InvalidMetadata(ValueError): - """A metadata field contains invalid data.""" - - field: str - """The name of the field that contains invalid data.""" - - def __init__(self, field: str, message: str) -> None: - self.field = field - super().__init__(message) - - -# The RawMetadata class attempts to make as few assumptions about the underlying -# serialization formats as possible. The idea is that as long as a serialization -# formats offer some very basic primitives in *some* way then we can support -# serializing to and from that format. -class RawMetadata(TypedDict, total=False): - """A dictionary of raw core metadata. - - Each field in core metadata maps to a key of this dictionary (when data is - provided). The key is lower-case and underscores are used instead of dashes - compared to the equivalent core metadata field. Any core metadata field that - can be specified multiple times or can hold multiple values in a single - field have a key with a plural name. See :class:`Metadata` whose attributes - match the keys of this dictionary. - - Core metadata fields that can be specified multiple times are stored as a - list or dict depending on which is appropriate for the field. Any fields - which hold multiple values in a single field are stored as a list. - - """ - - # Metadata 1.0 - PEP 241 - metadata_version: str - name: str - version: str - platforms: List[str] - summary: str - description: str - keywords: List[str] - home_page: str - author: str - author_email: str - license: str - - # Metadata 1.1 - PEP 314 - supported_platforms: List[str] - download_url: str - classifiers: List[str] - requires: List[str] - provides: List[str] - obsoletes: List[str] - - # Metadata 1.2 - PEP 345 - maintainer: str - maintainer_email: str - requires_dist: List[str] - provides_dist: List[str] - obsoletes_dist: List[str] - requires_python: str - requires_external: List[str] - project_urls: Dict[str, str] - - # Metadata 2.0 - # PEP 426 attempted to completely revamp the metadata format - # but got stuck without ever being able to build consensus on - # it and ultimately ended up withdrawn. - # - # However, a number of tools had started emitting METADATA with - # `2.0` Metadata-Version, so for historical reasons, this version - # was skipped. - - # Metadata 2.1 - PEP 566 - description_content_type: str - provides_extra: List[str] - - # Metadata 2.2 - PEP 643 - dynamic: List[str] - - # Metadata 2.3 - PEP 685 - # No new fields were added in PEP 685, just some edge case were - # tightened up to provide better interoptability. 
- - -_STRING_FIELDS = { - "author", - "author_email", - "description", - "description_content_type", - "download_url", - "home_page", - "license", - "maintainer", - "maintainer_email", - "metadata_version", - "name", - "requires_python", - "summary", - "version", -} - -_LIST_FIELDS = { - "classifiers", - "dynamic", - "obsoletes", - "obsoletes_dist", - "platforms", - "provides", - "provides_dist", - "provides_extra", - "requires", - "requires_dist", - "requires_external", - "supported_platforms", -} - -_DICT_FIELDS = { - "project_urls", -} - - -def _parse_keywords(data: str) -> List[str]: - """Split a string of comma-separate keyboards into a list of keywords.""" - return [k.strip() for k in data.split(",")] - - -def _parse_project_urls(data: List[str]) -> Dict[str, str]: - """Parse a list of label/URL string pairings separated by a comma.""" - urls = {} - for pair in data: - # Our logic is slightly tricky here as we want to try and do - # *something* reasonable with malformed data. - # - # The main thing that we have to worry about, is data that does - # not have a ',' at all to split the label from the Value. There - # isn't a singular right answer here, and we will fail validation - # later on (if the caller is validating) so it doesn't *really* - # matter, but since the missing value has to be an empty str - # and our return value is dict[str, str], if we let the key - # be the missing value, then they'd have multiple '' values that - # overwrite each other in a accumulating dict. - # - # The other potentional issue is that it's possible to have the - # same label multiple times in the metadata, with no solid "right" - # answer with what to do in that case. As such, we'll do the only - # thing we can, which is treat the field as unparseable and add it - # to our list of unparsed fields. - parts = [p.strip() for p in pair.split(",", 1)] - parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items - - # TODO: The spec doesn't say anything about if the keys should be - # considered case sensitive or not... logically they should - # be case-preserving and case-insensitive, but doing that - # would open up more cases where we might have duplicate - # entries. - label, url = parts - if label in urls: - # The label already exists in our set of urls, so this field - # is unparseable, and we can just add the whole thing to our - # unparseable data and stop processing it. - raise KeyError("duplicate labels in project urls") - urls[label] = url - - return urls - - -def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str: - """Get the body of the message.""" - # If our source is a str, then our caller has managed encodings for us, - # and we don't need to deal with it. - if isinstance(source, str): - payload: str = msg.get_payload() - return payload - # If our source is a bytes, then we're managing the encoding and we need - # to deal with it. - else: - bpayload: bytes = msg.get_payload(decode=True) - try: - return bpayload.decode("utf8", "strict") - except UnicodeDecodeError: - raise ValueError("payload in an invalid encoding") - - -# The various parse_FORMAT functions here are intended to be as lenient as -# possible in their parsing, while still returning a correctly typed -# RawMetadata. -# -# To aid in this, we also generally want to do as little touching of the -# data as possible, except where there are possibly some historic holdovers -# that make valid data awkward to work with. 
-# -# While this is a lower level, intermediate format than our ``Metadata`` -# class, some light touch ups can make a massive difference in usability. - -# Map METADATA fields to RawMetadata. -_EMAIL_TO_RAW_MAPPING = { - "author": "author", - "author-email": "author_email", - "classifier": "classifiers", - "description": "description", - "description-content-type": "description_content_type", - "download-url": "download_url", - "dynamic": "dynamic", - "home-page": "home_page", - "keywords": "keywords", - "license": "license", - "maintainer": "maintainer", - "maintainer-email": "maintainer_email", - "metadata-version": "metadata_version", - "name": "name", - "obsoletes": "obsoletes", - "obsoletes-dist": "obsoletes_dist", - "platform": "platforms", - "project-url": "project_urls", - "provides": "provides", - "provides-dist": "provides_dist", - "provides-extra": "provides_extra", - "requires": "requires", - "requires-dist": "requires_dist", - "requires-external": "requires_external", - "requires-python": "requires_python", - "summary": "summary", - "supported-platform": "supported_platforms", - "version": "version", -} -_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} - - -def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]: - """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). - - This function returns a two-item tuple of dicts. The first dict is of - recognized fields from the core metadata specification. Fields that can be - parsed and translated into Python's built-in types are converted - appropriately. All other fields are left as-is. Fields that are allowed to - appear multiple times are stored as lists. - - The second dict contains all other fields from the metadata. This includes - any unrecognized fields. It also includes any fields which are expected to - be parsed into a built-in type but were not formatted appropriately. Finally, - any fields that are expected to appear only once but are repeated are - included in this dict. - - """ - raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {} - unparsed: Dict[str, List[str]] = {} - - if isinstance(data, str): - parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data) - else: - parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data) - - # We have to wrap parsed.keys() in a set, because in the case of multiple - # values for a key (a list), the key will appear multiple times in the - # list of keys, but we're avoiding that by using get_all(). - for name in frozenset(parsed.keys()): - # Header names in RFC are case insensitive, so we'll normalize to all - # lower case to make comparisons easier. - name = name.lower() - - # We use get_all() here, even for fields that aren't multiple use, - # because otherwise someone could have e.g. two Name fields, and we - # would just silently ignore it rather than doing something about it. - headers = parsed.get_all(name) or [] - - # The way the email module works when parsing bytes is that it - # unconditionally decodes the bytes as ascii using the surrogateescape - # handler. When you pull that data back out (such as with get_all() ), - # it looks to see if the str has any surrogate escapes, and if it does - # it wraps it in a Header object instead of returning the string. - # - # As such, we'll look for those Header objects, and fix up the encoding. 
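The header decoding described above feeds ``parse_email``'s two-dict contract: recognized, well-formed fields land in the first mapping and everything questionable in the second. A sketch (assuming ``packaging`` is installed):

from packaging.metadata import parse_email

raw, unparsed = parse_email(
    "Metadata-Version: 2.1\n"
    "Name: example-project\n"
    "Version: 1.0\n"
    "X-Unknown-Field: preserved but unrecognized\n"
)
print(raw["name"])  # 'example-project'
print(unparsed)     # {'x-unknown-field': ['preserved but unrecognized']}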
- value = [] - # Flag if we have run into any issues processing the headers, thus - # signalling that the data belongs in 'unparsed'. - valid_encoding = True - for h in headers: - # It's unclear if this can return more types than just a Header or - # a str, so we'll just assert here to make sure. - assert isinstance(h, (email.header.Header, str)) - - # If it's a header object, we need to do our little dance to get - # the real data out of it. In cases where there is invalid data - # we're going to end up with mojibake, but there's no obvious, good - # way around that without reimplementing parts of the Header object - # ourselves. - # - # That should be fine since, if mojibacked happens, this key is - # going into the unparsed dict anyways. - if isinstance(h, email.header.Header): - # The Header object stores it's data as chunks, and each chunk - # can be independently encoded, so we'll need to check each - # of them. - chunks: List[Tuple[bytes, Optional[str]]] = [] - for bin, encoding in email.header.decode_header(h): - try: - bin.decode("utf8", "strict") - except UnicodeDecodeError: - # Enable mojibake. - encoding = "latin1" - valid_encoding = False - else: - encoding = "utf8" - chunks.append((bin, encoding)) - - # Turn our chunks back into a Header object, then let that - # Header object do the right thing to turn them into a - # string for us. - value.append(str(email.header.make_header(chunks))) - # This is already a string, so just add it. - else: - value.append(h) - - # We've processed all of our values to get them into a list of str, - # but we may have mojibake data, in which case this is an unparsed - # field. - if not valid_encoding: - unparsed[name] = value - continue - - raw_name = _EMAIL_TO_RAW_MAPPING.get(name) - if raw_name is None: - # This is a bit of a weird situation, we've encountered a key that - # we don't know what it means, so we don't know whether it's meant - # to be a list or not. - # - # Since we can't really tell one way or another, we'll just leave it - # as a list, even though it may be a single item list, because that's - # what makes the most sense for email headers. - unparsed[name] = value - continue - - # If this is one of our string fields, then we'll check to see if our - # value is a list of a single item. If it is then we'll assume that - # it was emitted as a single string, and unwrap the str from inside - # the list. - # - # If it's any other kind of data, then we haven't the faintest clue - # what we should parse it as, and we have to just add it to our list - # of unparsed stuff. - if raw_name in _STRING_FIELDS and len(value) == 1: - raw[raw_name] = value[0] - # If this is one of our list of string fields, then we can just assign - # the value, since email *only* has strings, and our get_all() call - # above ensures that this is a list. - elif raw_name in _LIST_FIELDS: - raw[raw_name] = value - # Special Case: Keywords - # The keywords field is implemented in the metadata spec as a str, - # but it conceptually is a list of strings, and is serialized using - # ", ".join(keywords), so we'll do some light data massaging to turn - # this into what it logically is. 
- elif raw_name == "keywords" and len(value) == 1: - raw[raw_name] = _parse_keywords(value[0]) - # Special Case: Project-URL - # The project urls is implemented in the metadata spec as a list of - # specially-formatted strings that represent a key and a value, which - # is fundamentally a mapping, however the email format doesn't support - # mappings in a sane way, so it was crammed into a list of strings - # instead. - # - # We will do a little light data massaging to turn this into a map as - # it logically should be. - elif raw_name == "project_urls": - try: - raw[raw_name] = _parse_project_urls(value) - except KeyError: - unparsed[name] = value - # Nothing that we've done has managed to parse this, so it'll just - # throw it in our unparseable data and move on. - else: - unparsed[name] = value - - # We need to support getting the Description from the message payload in - # addition to getting it from the the headers. This does mean, though, there - # is the possibility of it being set both ways, in which case we put both - # in 'unparsed' since we don't know which is right. - try: - payload = _get_payload(parsed, data) - except ValueError: - unparsed.setdefault("description", []).append( - parsed.get_payload(decode=isinstance(data, bytes)) - ) - else: - if payload: - # Check to see if we've already got a description, if so then both - # it, and this body move to unparseable. - if "description" in raw: - description_header = cast(str, raw.pop("description")) - unparsed.setdefault("description", []).extend( - [description_header, payload] - ) - elif "description" in unparsed: - unparsed["description"].append(payload) - else: - raw["description"] = payload - - # We need to cast our `raw` to a metadata, because a TypedDict only support - # literal key names, but we're computing our key names on purpose, but the - # way this function is implemented, our `TypedDict` can only have valid key - # names. - return cast(RawMetadata, raw), unparsed - - -_NOT_FOUND = object() - - -# Keep the two values in sync. -_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"] -_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"] - -_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"]) - - -class _Validator(Generic[T]): - """Validate a metadata field. - - All _process_*() methods correspond to a core metadata field. The method is - called with the field's raw value. If the raw value is valid it is returned - in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field). - If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause - as appropriate). - """ - - name: str - raw_name: str - added: _MetadataVersion - - def __init__( - self, - *, - added: _MetadataVersion = "1.0", - ) -> None: - self.added = added - - def __set_name__(self, _owner: "Metadata", name: str) -> None: - self.name = name - self.raw_name = _RAW_TO_EMAIL_MAPPING[name] - - def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T: - # With Python 3.8, the caching can be replaced with functools.cached_property(). - # No need to check the cache as attribute lookup will resolve into the - # instance's __dict__ before __get__ is called. - cache = instance.__dict__ - value = instance._raw.get(self.name) - - # To make the _process_* methods easier, we'll check if the value is None - # and if this field is NOT a required attribute, and if both of those - # things are true, we'll skip the the converter. 
This will mean that the - # converters never have to deal with the None union. - if self.name in _REQUIRED_ATTRS or value is not None: - try: - converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}") - except AttributeError: - pass - else: - value = converter(value) - - cache[self.name] = value - try: - del instance._raw[self.name] # type: ignore[misc] - except KeyError: - pass - - return cast(T, value) - - def _invalid_metadata( - self, msg: str, cause: Optional[Exception] = None - ) -> InvalidMetadata: - exc = InvalidMetadata( - self.raw_name, msg.format_map({"field": repr(self.raw_name)}) - ) - exc.__cause__ = cause - return exc - - def _process_metadata_version(self, value: str) -> _MetadataVersion: - # Implicitly makes Metadata-Version required. - if value not in _VALID_METADATA_VERSIONS: - raise self._invalid_metadata(f"{value!r} is not a valid metadata version") - return cast(_MetadataVersion, value) - - def _process_name(self, value: str) -> str: - if not value: - raise self._invalid_metadata("{field} is a required field") - # Validate the name as a side-effect. - try: - utils.canonicalize_name(value, validate=True) - except utils.InvalidName as exc: - raise self._invalid_metadata( - f"{value!r} is invalid for {{field}}", cause=exc - ) - else: - return value - - def _process_version(self, value: str) -> version_module.Version: - if not value: - raise self._invalid_metadata("{field} is a required field") - try: - return version_module.parse(value) - except version_module.InvalidVersion as exc: - raise self._invalid_metadata( - f"{value!r} is invalid for {{field}}", cause=exc - ) - - def _process_summary(self, value: str) -> str: - """Check the field contains no newlines.""" - if "\n" in value: - raise self._invalid_metadata("{field} must be a single line") - return value - - def _process_description_content_type(self, value: str) -> str: - content_types = {"text/plain", "text/x-rst", "text/markdown"} - message = email.message.EmailMessage() - message["content-type"] = value - - content_type, parameters = ( - # Defaults to `text/plain` if parsing failed. - message.get_content_type().lower(), - message["content-type"].params, - ) - # Check if content-type is valid or defaulted to `text/plain` and thus was - # not parseable. - if content_type not in content_types or content_type not in value.lower(): - raise self._invalid_metadata( - f"{{field}} must be one of {list(content_types)}, not {value!r}" - ) - - charset = parameters.get("charset", "UTF-8") - if charset != "UTF-8": - raise self._invalid_metadata( - f"{{field}} can only specify the UTF-8 charset, not {list(charset)}" - ) - - markdown_variants = {"GFM", "CommonMark"} - variant = parameters.get("variant", "GFM") # Use an acceptable default. 
- if content_type == "text/markdown" and variant not in markdown_variants: - raise self._invalid_metadata( - f"valid Markdown variants for {{field}} are {list(markdown_variants)}, " - f"not {variant!r}", - ) - return value - - def _process_dynamic(self, value: List[str]) -> List[str]: - for dynamic_field in map(str.lower, value): - if dynamic_field in {"name", "version", "metadata-version"}: - raise self._invalid_metadata( - f"{value!r} is not allowed as a dynamic field" - ) - elif dynamic_field not in _EMAIL_TO_RAW_MAPPING: - raise self._invalid_metadata(f"{value!r} is not a valid dynamic field") - return list(map(str.lower, value)) - - def _process_provides_extra( - self, - value: List[str], - ) -> List[utils.NormalizedName]: - normalized_names = [] - try: - for name in value: - normalized_names.append(utils.canonicalize_name(name, validate=True)) - except utils.InvalidName as exc: - raise self._invalid_metadata( - f"{name!r} is invalid for {{field}}", cause=exc - ) - else: - return normalized_names - - def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: - try: - return specifiers.SpecifierSet(value) - except specifiers.InvalidSpecifier as exc: - raise self._invalid_metadata( - f"{value!r} is invalid for {{field}}", cause=exc - ) - - def _process_requires_dist( - self, - value: List[str], - ) -> List[requirements.Requirement]: - reqs = [] - try: - for req in value: - reqs.append(requirements.Requirement(req)) - except requirements.InvalidRequirement as exc: - raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc) - else: - return reqs - - -class Metadata: - """Representation of distribution metadata. - - Compared to :class:`RawMetadata`, this class provides objects representing - metadata fields instead of only using built-in types. Any invalid metadata - will cause :exc:`InvalidMetadata` to be raised (with a - :py:attr:`~BaseException.__cause__` attribute as appropriate). - """ - - _raw: RawMetadata - - @classmethod - def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata": - """Create an instance from :class:`RawMetadata`. - - If *validate* is true, all metadata will be validated. All exceptions - related to validation will be gathered and raised as an :class:`ExceptionGroup`. - """ - ins = cls() - ins._raw = data.copy() # Mutations occur due to caching enriched values. - - if validate: - exceptions: List[Exception] = [] - try: - metadata_version = ins.metadata_version - metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) - except InvalidMetadata as metadata_version_exc: - exceptions.append(metadata_version_exc) - metadata_version = None - - # Make sure to check for the fields that are present, the required - # fields (so their absence can be reported). - fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS - # Remove fields that have already been checked. - fields_to_check -= {"metadata_version"} - - for key in fields_to_check: - try: - if metadata_version: - # Can't use getattr() as that triggers descriptor protocol which - # will fail due to no value for the instance argument. 
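The validators above report problems collectively: ``Metadata.from_email`` and ``from_raw`` gather every field error into one ``ExceptionGroup`` rather than failing on the first. A sketch (assuming Python 3.11+, or the fallback ``ExceptionGroup`` defined at the top of this module):

from packaging.metadata import Metadata

try:
    Metadata.from_email("Metadata-Version: 2.1\nName: example\nVersion: not-a-version\n")
except ExceptionGroup as group:
    for exc in group.exceptions:
        print(exc)  # e.g. 'not-a-version' is invalid for 'version'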
- try: - field_metadata_version = cls.__dict__[key].added - except KeyError: - exc = InvalidMetadata(key, f"unrecognized field: {key!r}") - exceptions.append(exc) - continue - field_age = _VALID_METADATA_VERSIONS.index( - field_metadata_version - ) - if field_age > metadata_age: - field = _RAW_TO_EMAIL_MAPPING[key] - exc = InvalidMetadata( - field, - "{field} introduced in metadata version " - "{field_metadata_version}, not {metadata_version}", - ) - exceptions.append(exc) - continue - getattr(ins, key) - except InvalidMetadata as exc: - exceptions.append(exc) - - if exceptions: - raise ExceptionGroup("invalid metadata", exceptions) - - return ins - - @classmethod - def from_email( - cls, data: Union[bytes, str], *, validate: bool = True - ) -> "Metadata": - """Parse metadata from email headers. - - If *validate* is true, the metadata will be validated. All exceptions - related to validation will be gathered and raised as an :class:`ExceptionGroup`. - """ - raw, unparsed = parse_email(data) - - if validate: - exceptions: list[Exception] = [] - for unparsed_key in unparsed: - if unparsed_key in _EMAIL_TO_RAW_MAPPING: - message = f"{unparsed_key!r} has invalid data" - else: - message = f"unrecognized field: {unparsed_key!r}" - exceptions.append(InvalidMetadata(unparsed_key, message)) - - if exceptions: - raise ExceptionGroup("unparsed", exceptions) - - try: - return cls.from_raw(raw, validate=validate) - except ExceptionGroup as exc_group: - raise ExceptionGroup( - "invalid or unparsed metadata", exc_group.exceptions - ) from None - - metadata_version: _Validator[_MetadataVersion] = _Validator() - """:external:ref:`core-metadata-metadata-version` - (required; validated to be a valid metadata version)""" - name: _Validator[str] = _Validator() - """:external:ref:`core-metadata-name` - (required; validated using :func:`~packaging.utils.canonicalize_name` and its - *validate* parameter)""" - version: _Validator[version_module.Version] = _Validator() - """:external:ref:`core-metadata-version` (required)""" - dynamic: _Validator[Optional[List[str]]] = _Validator( - added="2.2", - ) - """:external:ref:`core-metadata-dynamic` - (validated against core metadata field names and lowercased)""" - platforms: _Validator[Optional[List[str]]] = _Validator() - """:external:ref:`core-metadata-platform`""" - supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """:external:ref:`core-metadata-supported-platform`""" - summary: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-summary` (validated to contain no newlines)""" - description: _Validator[Optional[str]] = _Validator() # TODO 2.1: can be in body - """:external:ref:`core-metadata-description`""" - description_content_type: _Validator[Optional[str]] = _Validator(added="2.1") - """:external:ref:`core-metadata-description-content-type` (validated)""" - keywords: _Validator[Optional[List[str]]] = _Validator() - """:external:ref:`core-metadata-keywords`""" - home_page: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-home-page`""" - download_url: _Validator[Optional[str]] = _Validator(added="1.1") - """:external:ref:`core-metadata-download-url`""" - author: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-author`""" - author_email: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-author-email`""" - maintainer: _Validator[Optional[str]] = _Validator(added="1.2") - """:external:ref:`core-metadata-maintainer`""" - maintainer_email: 
_Validator[Optional[str]] = _Validator(added="1.2") - """:external:ref:`core-metadata-maintainer-email`""" - license: _Validator[Optional[str]] = _Validator() - """:external:ref:`core-metadata-license`""" - classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """:external:ref:`core-metadata-classifier`""" - requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator( - added="1.2" - ) - """:external:ref:`core-metadata-requires-dist`""" - requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator( - added="1.2" - ) - """:external:ref:`core-metadata-requires-python`""" - # Because `Requires-External` allows for non-PEP 440 version specifiers, we - # don't do any processing on the values. - requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2") - """:external:ref:`core-metadata-requires-external`""" - project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2") - """:external:ref:`core-metadata-project-url`""" - # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation - # regardless of metadata version. - provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator( - added="2.1", - ) - """:external:ref:`core-metadata-provides-extra`""" - provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") - """:external:ref:`core-metadata-provides-dist`""" - obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") - """:external:ref:`core-metadata-obsoletes-dist`""" - requires: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """``Requires`` (deprecated)""" - provides: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """``Provides`` (deprecated)""" - obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1") - """``Obsoletes`` (deprecated)""" diff --git a/setuptools/_distutils/_vendor/packaging/requirements.py b/setuptools/_distutils/_vendor/packaging/requirements.py deleted file mode 100644 index bdc43a7e98..0000000000 --- a/setuptools/_distutils/_vendor/packaging/requirements.py +++ /dev/null @@ -1,90 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from typing import Any, Iterator, Optional, Set - -from ._parser import parse_requirement as _parse_requirement -from ._tokenizer import ParserSyntaxError -from .markers import Marker, _normalize_extra_values -from .specifiers import SpecifierSet -from .utils import canonicalize_name - - -class InvalidRequirement(ValueError): - """ - An invalid requirement was found, users should refer to PEP 508. - """ - - -class Requirement: - """Parse a requirement. - - Parse a given requirement string into its parts, such as name, specifier, - URL, and extras. Raises InvalidRequirement on a badly-formed requirement - string. - """ - - # TODO: Can we test whether something is contained within a requirement? - # If so how do we do that? Do we need to test against the _name_ of - # the thing as well as the version? What about the markers? - # TODO: Can we normalize the name and extra name? 
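As the ``__eq__`` and ``__hash__`` methods further down show, ``Requirement`` comparison canonicalizes the project name while ``str()`` preserves the authored spelling. A sketch (assuming ``packaging`` is installed):

from packaging.requirements import Requirement

assert Requirement("Foo_Bar == 1.0") == Requirement("foo-bar==1.0")
print(str(Requirement("Foo_Bar == 1.0")))  # Foo_Bar==1.0 (original spelling kept)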
-
-    def __init__(self, requirement_string: str) -> None:
-        try:
-            parsed = _parse_requirement(requirement_string)
-        except ParserSyntaxError as e:
-            raise InvalidRequirement(str(e)) from e
-
-        self.name: str = parsed.name
-        self.url: Optional[str] = parsed.url or None
-        self.extras: Set[str] = set(parsed.extras or [])
-        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
-        self.marker: Optional[Marker] = None
-        if parsed.marker is not None:
-            self.marker = Marker.__new__(Marker)
-            self.marker._markers = _normalize_extra_values(parsed.marker)
-
-    def _iter_parts(self, name: str) -> Iterator[str]:
-        yield name
-
-        if self.extras:
-            formatted_extras = ",".join(sorted(self.extras))
-            yield f"[{formatted_extras}]"
-
-        if self.specifier:
-            yield str(self.specifier)
-
-        if self.url:
-            yield f"@ {self.url}"
-            if self.marker:
-                yield " "
-
-        if self.marker:
-            yield f"; {self.marker}"
-
-    def __str__(self) -> str:
-        return "".join(self._iter_parts(self.name))
-
-    def __repr__(self) -> str:
-        return f"<Requirement('{self}')>"
-
-    def __hash__(self) -> int:
-        return hash(
-            (
-                self.__class__.__name__,
-                *self._iter_parts(canonicalize_name(self.name)),
-            )
-        )
-
-    def __eq__(self, other: Any) -> bool:
-        if not isinstance(other, Requirement):
-            return NotImplemented
-
-        return (
-            canonicalize_name(self.name) == canonicalize_name(other.name)
-            and self.extras == other.extras
-            and self.specifier == other.specifier
-            and self.url == other.url
-            and self.marker == other.marker
-        )
diff --git a/setuptools/_distutils/_vendor/packaging/specifiers.py b/setuptools/_distutils/_vendor/packaging/specifiers.py
deleted file mode 100644
index 2d015bab59..0000000000
--- a/setuptools/_distutils/_vendor/packaging/specifiers.py
+++ /dev/null
@@ -1,1017 +0,0 @@
-# This file is dual licensed under the terms of the Apache License, Version
-# 2.0, and the BSD License. See the LICENSE file in the root of this repository
-# for complete details.
-"""
-.. testsetup::
-
-    from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
-    from packaging.version import Version
-"""
-
-import abc
-import itertools
-import re
-from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
-
-from .utils import canonicalize_version
-from .version import Version
-
-UnparsedVersion = Union[Version, str]
-UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
-CallableOperator = Callable[[Version, str], bool]
-
-
-def _coerce_version(version: UnparsedVersion) -> Version:
-    if not isinstance(version, Version):
-        version = Version(version)
-    return version
-
-
-class InvalidSpecifier(ValueError):
-    """
-    Raised when attempting to create a :class:`Specifier` with a specifier
-    string that is invalid.
-
-    >>> Specifier("lolwat")
-    Traceback (most recent call last):
-        ...
-    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
-    """
-
-
-class BaseSpecifier(metaclass=abc.ABCMeta):
-    @abc.abstractmethod
-    def __str__(self) -> str:
-        """
-        Returns the str representation of this Specifier-like object. This
-        should be representative of the Specifier itself.
-        """
-
-    @abc.abstractmethod
-    def __hash__(self) -> int:
-        """
-        Returns a hash value for this Specifier-like object.
-        """
-
-    @abc.abstractmethod
-    def __eq__(self, other: object) -> bool:
-        """
-        Returns a boolean representing whether or not the two Specifier-like
-        objects are equal.
-
-        :param other: The other object to check against.
-        """
-
-    @property
-    @abc.abstractmethod
-    def prereleases(self) -> Optional[bool]:
-        """Whether or not pre-releases as a whole are allowed.
-
-        This can be set to either ``True`` or ``False`` to explicitly enable or disable
-        prereleases or it can be set to ``None`` (the default) to use default semantics.
-        """
-
-    @prereleases.setter
-    def prereleases(self, value: bool) -> None:
-        """Setter for :attr:`prereleases`.
-
-        :param value: The value to set.
-        """
-
-    @abc.abstractmethod
-    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
-        """
-        Determines if the given item is contained within this specifier.
-        """
-
-    @abc.abstractmethod
-    def filter(
-        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
-    ) -> Iterator[UnparsedVersionVar]:
-        """
-        Takes an iterable of items and filters them so that only items which
-        are contained within this specifier are allowed in it.
-        """
-
-
-class Specifier(BaseSpecifier):
-    """This class abstracts handling of version specifiers.
-
-    .. tip::
-
-        It is generally not required to instantiate this manually. You should instead
-        prefer to work with :class:`SpecifierSet` instead, which can parse
-        comma-separated version specifiers (which is what package metadata contains).
-    """
-
-    _operator_regex_str = r"""
-        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
-        """
-    _version_regex_str = r"""
-        (?P<version>
-            (?:
-                # The identity operators allow for an escape hatch that will
-                # do an exact string match of the version you wish to install.
-                # This will not be parsed by PEP 440 and we cannot determine
-                # any semantic meaning from it. This operator is discouraged
-                # but included entirely as an escape hatch.
-                (?<====)  # Only match for the identity operator
-                \s*
-                [^\s;)]*  # The arbitrary version can be just about anything,
-                          # we match everything except for whitespace, a
-                          # semi-colon for marker support, and a closing paren
-                          # since versions can be enclosed in them.
-            )
-            |
-            (?:
-                # The (non)equality operators allow for wild card and local
-                # versions to be specified so we have to define these two
-                # operators separately to enable that.
-                (?<===|!=)            # Only match for equals and not equals
-
-                \s*
-                v?
-                (?:[0-9]+!)?          # epoch
-                [0-9]+(?:\.[0-9]+)*   # release
-
-                # You cannot use a wild card and a pre-release, post-release, a dev or
-                # local version together so group them with a | and make them optional.
-                (?:
-                    \.\*  # Wild card syntax of .*
-                    |
-                    (?:                                  # pre release
-                        [-_\.]?
-                        (alpha|beta|preview|pre|a|b|c|rc)
-                        [-_\.]?
-                        [0-9]*
-                    )?
-                    (?:                                  # post release
-                        (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
-                    )?
-                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
-                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
-                )?
-            )
-            |
-            (?:
-                # The compatible operator requires at least two digits in the
-                # release segment.
-                (?<=~=)               # Only match for the compatible operator
-
-                \s*
-                v?
-                (?:[0-9]+!)?          # epoch
-                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
-
-                (?:                                  # pre release
-                    [-_\.]?
-                    (alpha|beta|preview|pre|a|b|c|rc)
-                    [-_\.]?
-                    [0-9]*
-                )?
-                (?:                                  # post release
-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
-                )?
-                (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
-            )
-            |
-            (?:
-                # All other operators only allow a sub set of what the
-                # (non)equality operators do. Specifically they do not allow
-                # local versions to be specified nor do they allow the prefix
-                # matching wild cards.
-                (?<!==|!=|~=)         # We have special cases for these
-                                      # operators so we want to make sure they
-                                      # don't match here.
-
-                \s*
-                v?
-                (?:[0-9]+!)?          # epoch
-                [0-9]+(?:\.[0-9]+)*   # release
-                (?:                                  # pre release
-                    [-_\.]?
-                    (alpha|beta|preview|pre|a|b|c|rc)
-                    [-_\.]?
-                    [0-9]*
-                )?
-                (?:                                  # post release
-                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
-                )?
-                (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
-            )
-        )
-        """
-
-    _regex = re.compile(
-        r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
-        re.VERBOSE | re.IGNORECASE,
-    )
-
-    _operators = {
-        "~=": "compatible",
-        "==": "equal",
-        "!=": "not_equal",
-        "<=": "less_than_equal",
-        ">=": "greater_than_equal",
-        "<": "less_than",
-        ">": "greater_than",
-        "===": "arbitrary",
-    }
-
-    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
-        """Initialize a Specifier instance.
-
-        :param spec:
-            The string representation of a specifier which will be parsed and
-            normalized before use.
-        :param prereleases:
-            This tells the specifier if it should accept prerelease versions if
-            applicable or not. The default of ``None`` will autodetect it from the
-            given specifiers.
-        :raises InvalidSpecifier:
-            If the given specifier is invalid (i.e. bad syntax).
-        """
-        match = self._regex.search(spec)
-        if not match:
-            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
-
-        self._spec: Tuple[str, str] = (
-            match.group("operator").strip(),
-            match.group("version").strip(),
-        )
-
-        # Store whether or not this Specifier should accept prereleases
-        self._prereleases = prereleases
-
-    # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
-    @property  # type: ignore[override]
-    def prereleases(self) -> bool:
-        # If there is an explicit prereleases set for this, then we'll just
-        # blindly use that.
-        if self._prereleases is not None:
-            return self._prereleases
-
-        # Look at all of our specifiers and determine if they are inclusive
-        # operators, and if they are if they are including an explicit
-        # prerelease.
-        operator, version = self._spec
-        if operator in ["==", ">=", "<=", "~=", "==="]:
-            # The == specifier can include a trailing .*, if it does we
-            # want to remove before parsing.
-            if operator == "==" and version.endswith(".*"):
-                version = version[:-2]
-
-            # Parse the version, and if it is a pre-release than this
-            # specifier allows pre-releases.
-            if Version(version).is_prerelease:
-                return True
-
-        return False
-
-    @prereleases.setter
-    def prereleases(self, value: bool) -> None:
-        self._prereleases = value
-
-    @property
-    def operator(self) -> str:
-        """The operator of this specifier.
-
-        >>> Specifier("==1.2.3").operator
-        '=='
-        """
-        return self._spec[0]
-
-    @property
-    def version(self) -> str:
-        """The version of this specifier.
-
-        >>> Specifier("==1.2.3").version
-        '1.2.3'
-        """
-        return self._spec[1]
-
-    def __repr__(self) -> str:
-        """A representation of the Specifier that shows all internal state.
-
-        >>> Specifier('>=1.0.0')
-        <Specifier('>=1.0.0')>
-        >>> Specifier('>=1.0.0', prereleases=False)
-        <Specifier('>=1.0.0', prereleases=False)>
-        >>> Specifier('>=1.0.0', prereleases=True)
-        <Specifier('>=1.0.0', prereleases=True)>
-        """
-        pre = (
-            f", prereleases={self.prereleases!r}"
-            if self._prereleases is not None
-            else ""
-        )
-
-        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
-
-    def __str__(self) -> str:
-        """A string representation of the Specifier that can be round-tripped.
-
-        >>> str(Specifier('>=1.0.0'))
-        '>=1.0.0'
-        >>> str(Specifier('>=1.0.0', prereleases=False))
-        '>=1.0.0'
-        """
-        return "{}{}".format(*self._spec)
-
-    @property
-    def _canonical_spec(self) -> Tuple[str, str]:
-        canonical_version = canonicalize_version(
-            self._spec[1],
-            strip_trailing_zero=(self._spec[0] != "~="),
-        )
-        return self._spec[0], canonical_version
-
-    def __hash__(self) -> int:
-        return hash(self._canonical_spec)
-
-    def __eq__(self, other: object) -> bool:
-        """Whether or not the two Specifier-like objects are equal.
-
-        :param other: The other object to check against.
-
-        The value of :attr:`prereleases` is ignored.
-
-        >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
-        True
-        >>> (Specifier("==1.2.3", prereleases=False) ==
-        ...
Specifier("==1.2.3", prereleases=True)) - True - >>> Specifier("==1.2.3") == "==1.2.3" - True - >>> Specifier("==1.2.3") == Specifier("==1.2.4") - False - >>> Specifier("==1.2.3") == Specifier("~=1.2.3") - False - """ - if isinstance(other, str): - try: - other = self.__class__(str(other)) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._canonical_spec == other._canonical_spec - - def _get_operator(self, op: str) -> CallableOperator: - operator_callable: CallableOperator = getattr( - self, f"_compare_{self._operators[op]}" - ) - return operator_callable - - def _compare_compatible(self, prospective: Version, spec: str) -> bool: - - # Compatible releases have an equivalent combination of >= and ==. That - # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to - # implement this in terms of the other specifiers instead of - # implementing it ourselves. The only thing we need to do is construct - # the other specifiers. - - # We want everything but the last item in the version, but we want to - # ignore suffix segments. - prefix = _version_join( - list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] - ) - - # Add the prefix notation to the end of our string - prefix += ".*" - - return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( - prospective, prefix - ) - - def _compare_equal(self, prospective: Version, spec: str) -> bool: - - # We need special logic to handle prefix matching - if spec.endswith(".*"): - # In the case of prefix matching we want to ignore local segment. - normalized_prospective = canonicalize_version( - prospective.public, strip_trailing_zero=False - ) - # Get the normalized version string ignoring the trailing .* - normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) - # Split the spec out by bangs and dots, and pretend that there is - # an implicit dot in between a release segment and a pre-release segment. - split_spec = _version_split(normalized_spec) - - # Split the prospective version out by bangs and dots, and pretend - # that there is an implicit dot in between a release segment and - # a pre-release segment. - split_prospective = _version_split(normalized_prospective) - - # 0-pad the prospective version before shortening it to get the correct - # shortened version. - padded_prospective, _ = _pad_version(split_prospective, split_spec) - - # Shorten the prospective version to be the same length as the spec - # so that we can determine if the specifier is a prefix of the - # prospective version or not. - shortened_prospective = padded_prospective[: len(split_spec)] - - return shortened_prospective == split_spec - else: - # Convert our spec string into a Version - spec_version = Version(spec) - - # If the specifier does not have a local segment, then we want to - # act as if the prospective version also does not have a local - # segment. - if not spec_version.local: - prospective = Version(prospective.public) - - return prospective == spec_version - - def _compare_not_equal(self, prospective: Version, spec: str) -> bool: - return not self._compare_equal(prospective, spec) - - def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: - - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. 
- return Version(prospective.public) <= Version(spec) - - def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: - - # NB: Local version identifiers are NOT permitted in the version - # specifier, so local version labels can be universally removed from - # the prospective version. - return Version(prospective.public) >= Version(spec) - - def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: - - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is less than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective < spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a pre-release version, that we do not accept pre-release - # versions for the version mentioned in the specifier (e.g. <3.1 should - # not match 3.1.dev0, but should match 3.0.dev0). - if not spec.is_prerelease and prospective.is_prerelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # less than the spec version *and* it's not a pre-release of the same - # version in the spec. - return True - - def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: - - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec_str) - - # Check to see if the prospective version is greater than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective > spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a post-release version, that we do not accept - # post-release versions for the version mentioned in the specifier - # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). - if not spec.is_postrelease and prospective.is_postrelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is technically greater than, to match. - if prospective.local is not None: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # greater than the spec version *and* it's not a pre-release of the - # same version in the spec. - return True - - def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: - return str(prospective).lower() == str(spec).lower() - - def __contains__(self, item: Union[str, Version]) -> bool: - """Return whether or not the item is contained in this specifier. - - :param item: The item to check for. - - This is used for the ``in`` operator and behaves the same as - :meth:`contains` with no ``prereleases`` argument passed. 
- - >>> "1.2.3" in Specifier(">=1.2.3") - True - >>> Version("1.2.3") in Specifier(">=1.2.3") - True - >>> "1.0.0" in Specifier(">=1.2.3") - False - >>> "1.3.0a1" in Specifier(">=1.2.3") - False - >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True) - True - """ - return self.contains(item) - - def contains( - self, item: UnparsedVersion, prereleases: Optional[bool] = None - ) -> bool: - """Return whether or not the item is contained in this specifier. - - :param item: - The item to check for, which can be a version string or a - :class:`Version` instance. - :param prereleases: - Whether or not to match prereleases with this Specifier. If set to - ``None`` (the default), it uses :attr:`prereleases` to determine - whether or not prereleases are allowed. - - >>> Specifier(">=1.2.3").contains("1.2.3") - True - >>> Specifier(">=1.2.3").contains(Version("1.2.3")) - True - >>> Specifier(">=1.2.3").contains("1.0.0") - False - >>> Specifier(">=1.2.3").contains("1.3.0a1") - False - >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1") - True - >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True) - True - """ - - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version, this allows us to have a shortcut for - # "2.0" in Specifier(">=2") - normalized_item = _coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if normalized_item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. - operator_callable: CallableOperator = self._get_operator(self.operator) - return operator_callable(normalized_item, self.version) - - def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None - ) -> Iterator[UnparsedVersionVar]: - """Filter items in the given iterable, that match the specifier. - - :param iterable: - An iterable that can contain version strings and :class:`Version` instances. - The items in the iterable will be filtered according to the specifier. - :param prereleases: - Whether or not to allow prereleases in the returned iterator. If set to - ``None`` (the default), it will be intelligently decide whether to allow - prereleases or not (based on the :attr:`prereleases` attribute, and - whether the only versions matching are prereleases). - - This method is smarter than just ``filter(Specifier().contains, [...])`` - because it implements the rule from :pep:`440` that a prerelease item - SHOULD be accepted if no other versions match the given specifier. - - >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) - ['1.3'] - >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")])) - ['1.2.3', '1.3', ] - >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"])) - ['1.5a1'] - >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) - ['1.3', '1.5a1'] - >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) - ['1.3', '1.5a1'] - """ - - yielded = False - found_prereleases = [] - - kw = {"prereleases": prereleases if prereleases is not None else True} - - # Attempt to iterate over all the values in the iterable and if any of - # them match, yield them. 
- for version in iterable: - parsed_version = _coerce_version(version) - - if self.contains(parsed_version, **kw): - # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later in case nothing - # else matches this specifier. - if parsed_version.is_prerelease and not ( - prereleases or self.prereleases - ): - found_prereleases.append(version) - # Either this is not a prerelease, or we should have been - # accepting prereleases from the beginning. - else: - yielded = True - yield version - - # Now that we've iterated over everything, determine if we've yielded - # any values, and if we have not and we have any prereleases stored up - # then we will go ahead and yield the prereleases. - if not yielded and found_prereleases: - for version in found_prereleases: - yield version - - -_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") - - -def _version_split(version: str) -> List[str]: - """Split version into components. - - The split components are intended for version comparison. The logic does - not attempt to retain the original version string, so joining the - components back with :func:`_version_join` may not produce the original - version string. - """ - result: List[str] = [] - - epoch, _, rest = version.rpartition("!") - result.append(epoch or "0") - - for item in rest.split("."): - match = _prefix_regex.search(item) - if match: - result.extend(match.groups()) - else: - result.append(item) - return result - - -def _version_join(components: List[str]) -> str: - """Join split version components into a version string. - - This function assumes the input came from :func:`_version_split`, where the - first component must be the epoch (either empty or numeric), and all other - components numeric. - """ - epoch, *rest = components - return f"{epoch}!{'.'.join(rest)}" - - -def _is_not_suffix(segment: str) -> bool: - return not any( - segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") - ) - - -def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]: - left_split, right_split = [], [] - - # Get the release segment of our versions - left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) - right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) - - # Get the rest of our versions - left_split.append(left[len(left_split[0]) :]) - right_split.append(right[len(right_split[0]) :]) - - # Insert our padding - left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) - right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) - - return ( - list(itertools.chain.from_iterable(left_split)), - list(itertools.chain.from_iterable(right_split)), - ) - - -class SpecifierSet(BaseSpecifier): - """This class abstracts handling of a set of version specifiers. - - It can be passed a single specifier (``>=3.0``), a comma-separated list of - specifiers (``>=3.0,!=3.1``), or no specifier at all. - """ - - def __init__( - self, specifiers: str = "", prereleases: Optional[bool] = None - ) -> None: - """Initialize a SpecifierSet instance. - - :param specifiers: - The string representation of a specifier or a comma-separated list of - specifiers which will be parsed and normalized before use. - :param prereleases: - This tells the SpecifierSet if it should accept prerelease versions if - applicable or not. The default of ``None`` will autodetect it from the - given specifiers. 
- - :raises InvalidSpecifier: - If the given ``specifiers`` are not parseable than this exception will be - raised. - """ - - # Split on `,` to break each individual specifier into it's own item, and - # strip each item to remove leading/trailing whitespace. - split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] - - # Make each individual specifier a Specifier and save in a frozen set for later. - self._specs = frozenset(map(Specifier, split_specifiers)) - - # Store our prereleases value so we can use it later to determine if - # we accept prereleases or not. - self._prereleases = prereleases - - @property - def prereleases(self) -> Optional[bool]: - # If we have been given an explicit prerelease modifier, then we'll - # pass that through here. - if self._prereleases is not None: - return self._prereleases - - # If we don't have any specifiers, and we don't have a forced value, - # then we'll just return None since we don't know if this should have - # pre-releases or not. - if not self._specs: - return None - - # Otherwise we'll see if any of the given specifiers accept - # prereleases, if any of them do we'll return True, otherwise False. - return any(s.prereleases for s in self._specs) - - @prereleases.setter - def prereleases(self, value: bool) -> None: - self._prereleases = value - - def __repr__(self) -> str: - """A representation of the specifier set that shows all internal state. - - Note that the ordering of the individual specifiers within the set may not - match the input string. - - >>> SpecifierSet('>=1.0.0,!=2.0.0') - =1.0.0')> - >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False) - =1.0.0', prereleases=False)> - >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True) - =1.0.0', prereleases=True)> - """ - pre = ( - f", prereleases={self.prereleases!r}" - if self._prereleases is not None - else "" - ) - - return f"" - - def __str__(self) -> str: - """A string representation of the specifier set that can be round-tripped. - - Note that the ordering of the individual specifiers within the set may not - match the input string. - - >>> str(SpecifierSet(">=1.0.0,!=1.0.1")) - '!=1.0.1,>=1.0.0' - >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False)) - '!=1.0.1,>=1.0.0' - """ - return ",".join(sorted(str(s) for s in self._specs)) - - def __hash__(self) -> int: - return hash(self._specs) - - def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet": - """Return a SpecifierSet which is a combination of the two sets. - - :param other: The other object to combine with. - - >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1' - =1.0.0')> - >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1') - =1.0.0')> - """ - if isinstance(other, str): - other = SpecifierSet(other) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - specifier = SpecifierSet() - specifier._specs = frozenset(self._specs | other._specs) - - if self._prereleases is None and other._prereleases is not None: - specifier._prereleases = other._prereleases - elif self._prereleases is not None and other._prereleases is None: - specifier._prereleases = self._prereleases - elif self._prereleases == other._prereleases: - specifier._prereleases = self._prereleases - else: - raise ValueError( - "Cannot combine SpecifierSets with True and False prerelease " - "overrides." - ) - - return specifier - - def __eq__(self, other: object) -> bool: - """Whether or not the two SpecifierSet-like objects are equal. - - :param other: The other object to check against. 
- - The value of :attr:`prereleases` is ignored. - - >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1") - True - >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) == - ... SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)) - True - >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1" - True - >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0") - False - >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2") - False - """ - if isinstance(other, (str, Specifier)): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs == other._specs - - def __len__(self) -> int: - """Returns the number of specifiers in this specifier set.""" - return len(self._specs) - - def __iter__(self) -> Iterator[Specifier]: - """ - Returns an iterator over all the underlying :class:`Specifier` instances - in this specifier set. - - >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str) - [, =1.0.0')>] - """ - return iter(self._specs) - - def __contains__(self, item: UnparsedVersion) -> bool: - """Return whether or not the item is contained in this specifier. - - :param item: The item to check for. - - This is used for the ``in`` operator and behaves the same as - :meth:`contains` with no ``prereleases`` argument passed. - - >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1") - True - >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1") - True - >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1") - False - >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1") - False - >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True) - True - """ - return self.contains(item) - - def contains( - self, - item: UnparsedVersion, - prereleases: Optional[bool] = None, - installed: Optional[bool] = None, - ) -> bool: - """Return whether or not the item is contained in this SpecifierSet. - - :param item: - The item to check for, which can be a version string or a - :class:`Version` instance. - :param prereleases: - Whether or not to match prereleases with this SpecifierSet. If set to - ``None`` (the default), it uses :attr:`prereleases` to determine - whether or not prereleases are allowed. - - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3") - True - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3")) - True - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1") - False - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1") - False - >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1") - True - >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True) - True - """ - # Ensure that our item is a Version instance. - if not isinstance(item, Version): - item = Version(item) - - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # We can determine if we're going to allow pre-releases by looking to - # see if any of the underlying items supports them. If none of them do - # and this item is a pre-release then we do not allow it and we can - # short circuit that here. 
- # Note: This means that 1.0.dev1 would not be contained in something - # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 - if not prereleases and item.is_prerelease: - return False - - if installed and item.is_prerelease: - item = Version(item.base_version) - - # We simply dispatch to the underlying specs here to make sure that the - # given version is contained within all of them. - # Note: This use of all() here means that an empty set of specifiers - # will always return True, this is an explicit design decision. - return all(s.contains(item, prereleases=prereleases) for s in self._specs) - - def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None - ) -> Iterator[UnparsedVersionVar]: - """Filter items in the given iterable, that match the specifiers in this set. - - :param iterable: - An iterable that can contain version strings and :class:`Version` instances. - The items in the iterable will be filtered according to the specifier. - :param prereleases: - Whether or not to allow prereleases in the returned iterator. If set to - ``None`` (the default), it will be intelligently decide whether to allow - prereleases or not (based on the :attr:`prereleases` attribute, and - whether the only versions matching are prereleases). - - This method is smarter than just ``filter(SpecifierSet(...).contains, [...])`` - because it implements the rule from :pep:`440` that a prerelease item - SHOULD be accepted if no other versions match the given specifier. - - >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) - ['1.3'] - >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")])) - ['1.3', ] - >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"])) - [] - >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) - ['1.3', '1.5a1'] - >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) - ['1.3', '1.5a1'] - - An "empty" SpecifierSet will filter items based on the presence of prerelease - versions in the set. - - >>> list(SpecifierSet("").filter(["1.3", "1.5a1"])) - ['1.3'] - >>> list(SpecifierSet("").filter(["1.5a1"])) - ['1.5a1'] - >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"])) - ['1.3', '1.5a1'] - >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True)) - ['1.3', '1.5a1'] - """ - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # If we have any specifiers, then we want to wrap our iterable in the - # filter method for each one, this will act as a logical AND amongst - # each specifier. - if self._specs: - for spec in self._specs: - iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iter(iterable) - # If we do not have any specifiers, then we need to have a rough filter - # which will filter out any pre-releases, unless there are no final - # releases. 
- else: - filtered: List[UnparsedVersionVar] = [] - found_prereleases: List[UnparsedVersionVar] = [] - - for item in iterable: - parsed_version = _coerce_version(item) - - # Store any item which is a pre-release for later unless we've - # already found a final version or we are accepting prereleases - if parsed_version.is_prerelease and not prereleases: - if not filtered: - found_prereleases.append(item) - else: - filtered.append(item) - - # If we've found no items except for pre-releases, then we'll go - # ahead and use the pre-releases - if not filtered and found_prereleases and prereleases is None: - return iter(found_prereleases) - - return iter(filtered) diff --git a/setuptools/_distutils/_vendor/packaging/tags.py b/setuptools/_distutils/_vendor/packaging/tags.py deleted file mode 100644 index 89f1926137..0000000000 --- a/setuptools/_distutils/_vendor/packaging/tags.py +++ /dev/null @@ -1,571 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import logging -import platform -import re -import struct -import subprocess -import sys -import sysconfig -from importlib.machinery import EXTENSION_SUFFIXES -from typing import ( - Dict, - FrozenSet, - Iterable, - Iterator, - List, - Optional, - Sequence, - Tuple, - Union, - cast, -) - -from . import _manylinux, _musllinux - -logger = logging.getLogger(__name__) - -PythonVersion = Sequence[int] -MacVersion = Tuple[int, int] - -INTERPRETER_SHORT_NAMES: Dict[str, str] = { - "python": "py", # Generic. - "cpython": "cp", - "pypy": "pp", - "ironpython": "ip", - "jython": "jy", -} - - -_32_BIT_INTERPRETER = struct.calcsize("P") == 4 - - -class Tag: - """ - A representation of the tag triple for a wheel. - - Instances are considered immutable and thus are hashable. Equality checking - is also supported. - """ - - __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] - - def __init__(self, interpreter: str, abi: str, platform: str) -> None: - self._interpreter = interpreter.lower() - self._abi = abi.lower() - self._platform = platform.lower() - # The __hash__ of every single element in a Set[Tag] will be evaluated each time - # that a set calls its `.disjoint()` method, which may be called hundreds of - # times when scanning a page of links for packages with tags matching that - # Set[Tag]. Pre-computing the value here produces significant speedups for - # downstream consumers. - self._hash = hash((self._interpreter, self._abi, self._platform)) - - @property - def interpreter(self) -> str: - return self._interpreter - - @property - def abi(self) -> str: - return self._abi - - @property - def platform(self) -> str: - return self._platform - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Tag): - return NotImplemented - - return ( - (self._hash == other._hash) # Short-circuit ASAP for perf reasons. - and (self._platform == other._platform) - and (self._abi == other._abi) - and (self._interpreter == other._interpreter) - ) - - def __hash__(self) -> int: - return self._hash - - def __str__(self) -> str: - return f"{self._interpreter}-{self._abi}-{self._platform}" - - def __repr__(self) -> str: - return f"<{self} @ {id(self)}>" - - -def parse_tag(tag: str) -> FrozenSet[Tag]: - """ - Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. - - Returning a set is required due to the possibility that the tag is a - compressed tag set. 
- """ - tags = set() - interpreters, abis, platforms = tag.split("-") - for interpreter in interpreters.split("."): - for abi in abis.split("."): - for platform_ in platforms.split("."): - tags.add(Tag(interpreter, abi, platform_)) - return frozenset(tags) - - -def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: - value: Union[int, str, None] = sysconfig.get_config_var(name) - if value is None and warn: - logger.debug( - "Config variable '%s' is unset, Python ABI tag may be incorrect", name - ) - return value - - -def _normalize_string(string: str) -> str: - return string.replace(".", "_").replace("-", "_").replace(" ", "_") - - -def _is_threaded_cpython(abis: List[str]) -> bool: - """ - Determine if the ABI corresponds to a threaded (`--disable-gil`) build. - - The threaded builds are indicated by a "t" in the abiflags. - """ - if len(abis) == 0: - return False - # expect e.g., cp313 - m = re.match(r"cp\d+(.*)", abis[0]) - if not m: - return False - abiflags = m.group(1) - return "t" in abiflags - - -def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool: - """ - Determine if the Python version supports abi3. - - PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`) - builds do not support abi3. - """ - return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading - - -def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: - py_version = tuple(py_version) # To allow for version comparison. - abis = [] - version = _version_nodot(py_version[:2]) - threading = debug = pymalloc = ucs4 = "" - with_debug = _get_config_var("Py_DEBUG", warn) - has_refcount = hasattr(sys, "gettotalrefcount") - # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled - # extension modules is the best option. - # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 - has_ext = "_d.pyd" in EXTENSION_SUFFIXES - if with_debug or (with_debug is None and (has_refcount or has_ext)): - debug = "d" - if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn): - threading = "t" - if py_version < (3, 8): - with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) - if with_pymalloc or with_pymalloc is None: - pymalloc = "m" - if py_version < (3, 3): - unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) - if unicode_size == 4 or ( - unicode_size is None and sys.maxunicode == 0x10FFFF - ): - ucs4 = "u" - elif debug: - # Debug builds can also load "normal" extension modules. - # We can also assume no UCS-4 or pymalloc requirement. - abis.append(f"cp{version}{threading}") - abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}") - return abis - - -def cpython_tags( - python_version: Optional[PythonVersion] = None, - abis: Optional[Iterable[str]] = None, - platforms: Optional[Iterable[str]] = None, - *, - warn: bool = False, -) -> Iterator[Tag]: - """ - Yields the tags for a CPython interpreter. - - The tags consist of: - - cp-- - - cp-abi3- - - cp-none- - - cp-abi3- # Older Python versions down to 3.2. - - If python_version only specifies a major version then user-provided ABIs and - the 'none' ABItag will be used. - - If 'abi3' or 'none' are specified in 'abis' then they will be yielded at - their normal position and not at the beginning. 
- """ - if not python_version: - python_version = sys.version_info[:2] - - interpreter = f"cp{_version_nodot(python_version[:2])}" - - if abis is None: - if len(python_version) > 1: - abis = _cpython_abis(python_version, warn) - else: - abis = [] - abis = list(abis) - # 'abi3' and 'none' are explicitly handled later. - for explicit_abi in ("abi3", "none"): - try: - abis.remove(explicit_abi) - except ValueError: - pass - - platforms = list(platforms or platform_tags()) - for abi in abis: - for platform_ in platforms: - yield Tag(interpreter, abi, platform_) - - threading = _is_threaded_cpython(abis) - use_abi3 = _abi3_applies(python_version, threading) - if use_abi3: - yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) - yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) - - if use_abi3: - for minor_version in range(python_version[1] - 1, 1, -1): - for platform_ in platforms: - interpreter = "cp{version}".format( - version=_version_nodot((python_version[0], minor_version)) - ) - yield Tag(interpreter, "abi3", platform_) - - -def _generic_abi() -> List[str]: - """ - Return the ABI tag based on EXT_SUFFIX. - """ - # The following are examples of `EXT_SUFFIX`. - # We want to keep the parts which are related to the ABI and remove the - # parts which are related to the platform: - # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310 - # - mac: '.cpython-310-darwin.so' => cp310 - # - win: '.cp310-win_amd64.pyd' => cp310 - # - win: '.pyd' => cp37 (uses _cpython_abis()) - # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73 - # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib' - # => graalpy_38_native - - ext_suffix = _get_config_var("EXT_SUFFIX", warn=True) - if not isinstance(ext_suffix, str) or ext_suffix[0] != ".": - raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')") - parts = ext_suffix.split(".") - if len(parts) < 3: - # CPython3.7 and earlier uses ".pyd" on Windows. - return _cpython_abis(sys.version_info[:2]) - soabi = parts[1] - if soabi.startswith("cpython"): - # non-windows - abi = "cp" + soabi.split("-")[1] - elif soabi.startswith("cp"): - # windows - abi = soabi.split("-")[0] - elif soabi.startswith("pypy"): - abi = "-".join(soabi.split("-")[:2]) - elif soabi.startswith("graalpy"): - abi = "-".join(soabi.split("-")[:3]) - elif soabi: - # pyston, ironpython, others? - abi = soabi - else: - return [] - return [_normalize_string(abi)] - - -def generic_tags( - interpreter: Optional[str] = None, - abis: Optional[Iterable[str]] = None, - platforms: Optional[Iterable[str]] = None, - *, - warn: bool = False, -) -> Iterator[Tag]: - """ - Yields the tags for a generic interpreter. - - The tags consist of: - - -- - - The "none" ABI will be added if it was not explicitly provided. - """ - if not interpreter: - interp_name = interpreter_name() - interp_version = interpreter_version(warn=warn) - interpreter = "".join([interp_name, interp_version]) - if abis is None: - abis = _generic_abi() - else: - abis = list(abis) - platforms = list(platforms or platform_tags()) - if "none" not in abis: - abis.append("none") - for abi in abis: - for platform_ in platforms: - yield Tag(interpreter, abi, platform_) - - -def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: - """ - Yields Python versions in descending order. - - After the latest version, the major-only version will be yielded, and then - all previous versions of that major version. 
- """ - if len(py_version) > 1: - yield f"py{_version_nodot(py_version[:2])}" - yield f"py{py_version[0]}" - if len(py_version) > 1: - for minor in range(py_version[1] - 1, -1, -1): - yield f"py{_version_nodot((py_version[0], minor))}" - - -def compatible_tags( - python_version: Optional[PythonVersion] = None, - interpreter: Optional[str] = None, - platforms: Optional[Iterable[str]] = None, -) -> Iterator[Tag]: - """ - Yields the sequence of tags that are compatible with a specific version of Python. - - The tags consist of: - - py*-none- - - -none-any # ... if `interpreter` is provided. - - py*-none-any - """ - if not python_version: - python_version = sys.version_info[:2] - platforms = list(platforms or platform_tags()) - for version in _py_interpreter_range(python_version): - for platform_ in platforms: - yield Tag(version, "none", platform_) - if interpreter: - yield Tag(interpreter, "none", "any") - for version in _py_interpreter_range(python_version): - yield Tag(version, "none", "any") - - -def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: - if not is_32bit: - return arch - - if arch.startswith("ppc"): - return "ppc" - - return "i386" - - -def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]: - formats = [cpu_arch] - if cpu_arch == "x86_64": - if version < (10, 4): - return [] - formats.extend(["intel", "fat64", "fat32"]) - - elif cpu_arch == "i386": - if version < (10, 4): - return [] - formats.extend(["intel", "fat32", "fat"]) - - elif cpu_arch == "ppc64": - # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? - if version > (10, 5) or version < (10, 4): - return [] - formats.append("fat64") - - elif cpu_arch == "ppc": - if version > (10, 6): - return [] - formats.extend(["fat32", "fat"]) - - if cpu_arch in {"arm64", "x86_64"}: - formats.append("universal2") - - if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: - formats.append("universal") - - return formats - - -def mac_platforms( - version: Optional[MacVersion] = None, arch: Optional[str] = None -) -> Iterator[str]: - """ - Yields the platform tags for a macOS system. - - The `version` parameter is a two-item tuple specifying the macOS version to - generate platform tags for. The `arch` parameter is the CPU architecture to - generate platform tags for. Both parameters default to the appropriate value - for the current system. - """ - version_str, _, cpu_arch = platform.mac_ver() - if version is None: - version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) - if version == (10, 16): - # When built against an older macOS SDK, Python will report macOS 10.16 - # instead of the real version. - version_str = subprocess.run( - [ - sys.executable, - "-sS", - "-c", - "import platform; print(platform.mac_ver()[0])", - ], - check=True, - env={"SYSTEM_VERSION_COMPAT": "0"}, - stdout=subprocess.PIPE, - text=True, - ).stdout - version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) - else: - version = version - if arch is None: - arch = _mac_arch(cpu_arch) - else: - arch = arch - - if (10, 0) <= version and version < (11, 0): - # Prior to Mac OS 11, each yearly release of Mac OS bumped the - # "minor" version number. The major version was always 10. 
- for minor_version in range(version[1], -1, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=10, minor=minor_version, binary_format=binary_format - ) - - if version >= (11, 0): - # Starting with Mac OS 11, each yearly release bumps the major version - # number. The minor versions are now the midyear updates. - for major_version in range(version[0], 10, -1): - compat_version = major_version, 0 - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=major_version, minor=0, binary_format=binary_format - ) - - if version >= (11, 0): - # Mac OS 11 on x86_64 is compatible with binaries from previous releases. - # Arm64 support was introduced in 11.0, so no Arm binaries from previous - # releases exist. - # - # However, the "universal2" binary format can have a - # macOS version earlier than 11.0 when the x86_64 part of the binary supports - # that version of macOS. - if arch == "x86_64": - for minor_version in range(16, 3, -1): - compat_version = 10, minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - else: - for minor_version in range(16, 3, -1): - compat_version = 10, minor_version - binary_format = "universal2" - yield "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - - -def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: - linux = _normalize_string(sysconfig.get_platform()) - if not linux.startswith("linux_"): - # we should never be here, just yield the sysconfig one and return - yield linux - return - if is_32bit: - if linux == "linux_x86_64": - linux = "linux_i686" - elif linux == "linux_aarch64": - linux = "linux_armv8l" - _, arch = linux.split("_", 1) - archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch]) - yield from _manylinux.platform_tags(archs) - yield from _musllinux.platform_tags(archs) - for arch in archs: - yield f"linux_{arch}" - - -def _generic_platforms() -> Iterator[str]: - yield _normalize_string(sysconfig.get_platform()) - - -def platform_tags() -> Iterator[str]: - """ - Provides the platform tags for this installation. - """ - if platform.system() == "Darwin": - return mac_platforms() - elif platform.system() == "Linux": - return _linux_platforms() - else: - return _generic_platforms() - - -def interpreter_name() -> str: - """ - Returns the name of the running interpreter. - - Some implementations have a reserved, two-letter abbreviation which will - be returned when appropriate. - """ - name = sys.implementation.name - return INTERPRETER_SHORT_NAMES.get(name) or name - - -def interpreter_version(*, warn: bool = False) -> str: - """ - Returns the version of the running interpreter. - """ - version = _get_config_var("py_version_nodot", warn=warn) - if version: - version = str(version) - else: - version = _version_nodot(sys.version_info[:2]) - return version - - -def _version_nodot(version: PythonVersion) -> str: - return "".join(map(str, version)) - - -def sys_tags(*, warn: bool = False) -> Iterator[Tag]: - """ - Returns the sequence of tag triples for the running interpreter. 
- - The order of the sequence corresponds to priority order for the - interpreter, from most to least important. - """ - - interp_name = interpreter_name() - if interp_name == "cp": - yield from cpython_tags(warn=warn) - else: - yield from generic_tags() - - if interp_name == "pp": - interp = "pp3" - elif interp_name == "cp": - interp = "cp" + interpreter_version(warn=warn) - else: - interp = None - yield from compatible_tags(interpreter=interp) diff --git a/setuptools/_distutils/_vendor/packaging/utils.py b/setuptools/_distutils/_vendor/packaging/utils.py deleted file mode 100644 index c2c2f75aa8..0000000000 --- a/setuptools/_distutils/_vendor/packaging/utils.py +++ /dev/null @@ -1,172 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import re -from typing import FrozenSet, NewType, Tuple, Union, cast - -from .tags import Tag, parse_tag -from .version import InvalidVersion, Version - -BuildTag = Union[Tuple[()], Tuple[int, str]] -NormalizedName = NewType("NormalizedName", str) - - -class InvalidName(ValueError): - """ - An invalid distribution name; users should refer to the packaging user guide. - """ - - -class InvalidWheelFilename(ValueError): - """ - An invalid wheel filename was found, users should refer to PEP 427. - """ - - -class InvalidSdistFilename(ValueError): - """ - An invalid sdist filename was found, users should refer to the packaging user guide. - """ - - -# Core metadata spec for `Name` -_validate_regex = re.compile( - r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE -) -_canonicalize_regex = re.compile(r"[-_.]+") -_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$") -# PEP 427: The build number must start with a digit. -_build_tag_regex = re.compile(r"(\d+)(.*)") - - -def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName: - if validate and not _validate_regex.match(name): - raise InvalidName(f"name is invalid: {name!r}") - # This is taken from PEP 503. - value = _canonicalize_regex.sub("-", name).lower() - return cast(NormalizedName, value) - - -def is_normalized_name(name: str) -> bool: - return _normalized_regex.match(name) is not None - - -def canonicalize_version( - version: Union[Version, str], *, strip_trailing_zero: bool = True -) -> str: - """ - This is very similar to Version.__str__, but has one subtle difference - with the way it handles the release segment. 
- """ - if isinstance(version, str): - try: - parsed = Version(version) - except InvalidVersion: - # Legacy versions cannot be normalized - return version - else: - parsed = version - - parts = [] - - # Epoch - if parsed.epoch != 0: - parts.append(f"{parsed.epoch}!") - - # Release segment - release_segment = ".".join(str(x) for x in parsed.release) - if strip_trailing_zero: - # NB: This strips trailing '.0's to normalize - release_segment = re.sub(r"(\.0)+$", "", release_segment) - parts.append(release_segment) - - # Pre-release - if parsed.pre is not None: - parts.append("".join(str(x) for x in parsed.pre)) - - # Post-release - if parsed.post is not None: - parts.append(f".post{parsed.post}") - - # Development release - if parsed.dev is not None: - parts.append(f".dev{parsed.dev}") - - # Local version segment - if parsed.local is not None: - parts.append(f"+{parsed.local}") - - return "".join(parts) - - -def parse_wheel_filename( - filename: str, -) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]: - if not filename.endswith(".whl"): - raise InvalidWheelFilename( - f"Invalid wheel filename (extension must be '.whl'): {filename}" - ) - - filename = filename[:-4] - dashes = filename.count("-") - if dashes not in (4, 5): - raise InvalidWheelFilename( - f"Invalid wheel filename (wrong number of parts): {filename}" - ) - - parts = filename.split("-", dashes - 2) - name_part = parts[0] - # See PEP 427 for the rules on escaping the project name. - if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: - raise InvalidWheelFilename(f"Invalid project name: {filename}") - name = canonicalize_name(name_part) - - try: - version = Version(parts[1]) - except InvalidVersion as e: - raise InvalidWheelFilename( - f"Invalid wheel filename (invalid version): {filename}" - ) from e - - if dashes == 5: - build_part = parts[2] - build_match = _build_tag_regex.match(build_part) - if build_match is None: - raise InvalidWheelFilename( - f"Invalid build number: {build_part} in '{filename}'" - ) - build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2))) - else: - build = () - tags = parse_tag(parts[-1]) - return (name, version, build, tags) - - -def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]: - if filename.endswith(".tar.gz"): - file_stem = filename[: -len(".tar.gz")] - elif filename.endswith(".zip"): - file_stem = filename[: -len(".zip")] - else: - raise InvalidSdistFilename( - f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):" - f" {filename}" - ) - - # We are requiring a PEP 440 version, which cannot contain dashes, - # so we split on the last dash. - name_part, sep, version_part = file_stem.rpartition("-") - if not sep: - raise InvalidSdistFilename(f"Invalid sdist filename: {filename}") - - name = canonicalize_name(name_part) - - try: - version = Version(version_part) - except InvalidVersion as e: - raise InvalidSdistFilename( - f"Invalid sdist filename (invalid version): {filename}" - ) from e - - return (name, version) diff --git a/setuptools/_distutils/_vendor/packaging/version.py b/setuptools/_distutils/_vendor/packaging/version.py deleted file mode 100644 index 5faab9bd0d..0000000000 --- a/setuptools/_distutils/_vendor/packaging/version.py +++ /dev/null @@ -1,563 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -""" -.. 
testsetup:: - - from packaging.version import parse, Version -""" - -import itertools -import re -from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union - -from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType - -__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"] - -LocalType = Tuple[Union[int, str], ...] - -CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]] -CmpLocalType = Union[ - NegativeInfinityType, - Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...], -] -CmpKey = Tuple[ - int, - Tuple[int, ...], - CmpPrePostDevType, - CmpPrePostDevType, - CmpPrePostDevType, - CmpLocalType, -] -VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool] - - -class _Version(NamedTuple): - epoch: int - release: Tuple[int, ...] - dev: Optional[Tuple[str, int]] - pre: Optional[Tuple[str, int]] - post: Optional[Tuple[str, int]] - local: Optional[LocalType] - - -def parse(version: str) -> "Version": - """Parse the given version string. - - >>> parse('1.0.dev1') - - - :param version: The version string to parse. - :raises InvalidVersion: When the version string is not a valid version. - """ - return Version(version) - - -class InvalidVersion(ValueError): - """Raised when a version string is not a valid version. - - >>> Version("invalid") - Traceback (most recent call last): - ... - packaging.version.InvalidVersion: Invalid version: 'invalid' - """ - - -class _BaseVersion: - _key: Tuple[Any, ...] - - def __hash__(self) -> int: - return hash(self._key) - - # Please keep the duplicated `isinstance` check - # in the six comparisons hereunder - # unless you find a way to avoid adding overhead function calls. - def __lt__(self, other: "_BaseVersion") -> bool: - if not isinstance(other, _BaseVersion): - return NotImplemented - - return self._key < other._key - - def __le__(self, other: "_BaseVersion") -> bool: - if not isinstance(other, _BaseVersion): - return NotImplemented - - return self._key <= other._key - - def __eq__(self, other: object) -> bool: - if not isinstance(other, _BaseVersion): - return NotImplemented - - return self._key == other._key - - def __ge__(self, other: "_BaseVersion") -> bool: - if not isinstance(other, _BaseVersion): - return NotImplemented - - return self._key >= other._key - - def __gt__(self, other: "_BaseVersion") -> bool: - if not isinstance(other, _BaseVersion): - return NotImplemented - - return self._key > other._key - - def __ne__(self, other: object) -> bool: - if not isinstance(other, _BaseVersion): - return NotImplemented - - return self._key != other._key - - -# Deliberately not anchored to the start and end of the string, to make it -# easier for 3rd party code to reuse -_VERSION_PATTERN = r""" - v? - (?: - (?:(?P[0-9]+)!)? # epoch - (?P[0-9]+(?:\.[0-9]+)*) # release segment - (?P
<pre>                                          # pre-release
-            [-_\.]?
-            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
-            [-_\.]?
-            (?P<pre_n>[0-9]+)?
-        )?
-        (?P<post>                                         # post release
-            (?:-(?P<post_n1>[0-9]+))
-            |
-            (?:
-                [-_\.]?
-                (?P<post_l>post|rev|r)
-                [-_\.]?
-                (?P<post_n2>[0-9]+)?
-            )
-        )?
-        (?P<dev>                                          # dev release
-            [-_\.]?
-            (?P<dev_l>dev)
-            [-_\.]?
-            (?P<dev_n>[0-9]+)?
-        )?
-    )
-    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
-"""
-
-VERSION_PATTERN = _VERSION_PATTERN
-"""
-A string containing the regular expression used to match a valid version.
-
-The pattern is not anchored at either end, and is intended for embedding in larger
-expressions (for example, matching a version number as part of a file name). The
-regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
-flags set.
-
-:meta hide-value:
-"""
-
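A minimal sketch of embedding ``VERSION_PATTERN`` in a larger expression, as the
docstring above describes; the project name ``myproj`` is hypothetical, and the
import assumes the real (non-vendored) ``packaging`` distribution rather than the
copy being deleted here:

    import re
    from packaging.version import VERSION_PATTERN

    filename_re = re.compile(
        r"^myproj-(?P<version>" + VERSION_PATTERN + r")\.tar\.gz$",
        re.VERBOSE | re.IGNORECASE,
    )
    match = filename_re.match("myproj-1.2.3rc1.tar.gz")
    assert match is not None and match.group("version") == "1.2.3rc1"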
-
-class Version(_BaseVersion):
-    """This class abstracts handling of a project's versions.
-
-    A :class:`Version` instance is comparison aware and can be compared and
-    sorted using the standard Python interfaces.
-
-    >>> v1 = Version("1.0a5")
-    >>> v2 = Version("1.0")
-    >>> v1
-    <Version('1.0a5')>
-    >>> v2
-    <Version('1.0')>
-    >>> v1 < v2
-    True
-    >>> v1 == v2
-    False
-    >>> v1 > v2
-    False
-    >>> v1 >= v2
-    False
-    >>> v1 <= v2
-    True
-    """
-
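A quick sketch of the comparison-aware sorting the class docstring above promises,
assuming the real ``packaging`` distribution:

    from packaging.version import Version

    versions = ["1.0", "1.0a5", "1.0.post1", "2.0.dev1"]
    assert sorted(versions, key=Version) == ["1.0a5", "1.0", "1.0.post1", "2.0.dev1"]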
-    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
-    _key: CmpKey
-
-    def __init__(self, version: str) -> None:
-        """Initialize a Version object.
-
-        :param version:
-            The string representation of a version which will be parsed and normalized
-            before use.
-        :raises InvalidVersion:
-            If the ``version`` does not conform to PEP 440 in any way then this
-            exception will be raised.
-        """
-
-        # Validate the version and parse it into pieces
-        match = self._regex.search(version)
-        if not match:
-            raise InvalidVersion(f"Invalid version: '{version}'")
-
-        # Store the parsed out pieces of the version
-        self._version = _Version(
-            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
-            release=tuple(int(i) for i in match.group("release").split(".")),
-            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
-            post=_parse_letter_version(
-                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
-            ),
-            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
-            local=_parse_local_version(match.group("local")),
-        )
-
-        # Generate a key which will be used for sorting
-        self._key = _cmpkey(
-            self._version.epoch,
-            self._version.release,
-            self._version.pre,
-            self._version.post,
-            self._version.dev,
-            self._version.local,
-        )
-
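A short sketch of the parse-and-validate behavior implemented by ``__init__``
above, again assuming the real ``packaging`` distribution:

    from packaging.version import InvalidVersion, Version

    v = Version("1!2.0.0.post1+abc")
    assert (v.epoch, v.release, v.post, v.local) == (1, (2, 0, 0), 1, "abc")

    try:
        Version("not-a-version")
    except InvalidVersion as exc:
        print(exc)  # e.g. "Invalid version: 'not-a-version'"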
-    def __repr__(self) -> str:
-        """A representation of the Version that shows all internal state.
-
-        >>> Version('1.0.0')
-        <Version('1.0.0')>
-        """
-        return f"<Version('{str(self)}')>"
-
-    def __str__(self) -> str:
-        """A string representation of the version that can be rounded-tripped.
-
-        >>> str(Version("1.0a5"))
-        '1.0a5'
-        """
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append(f"{self.epoch}!")
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        # Pre-release
-        if self.pre is not None:
-            parts.append("".join(str(x) for x in self.pre))
-
-        # Post-release
-        if self.post is not None:
-            parts.append(f".post{self.post}")
-
-        # Development release
-        if self.dev is not None:
-            parts.append(f".dev{self.dev}")
-
-        # Local version segment
-        if self.local is not None:
-            parts.append(f"+{self.local}")
-
-        return "".join(parts)
-
-    @property
-    def epoch(self) -> int:
-        """The epoch of the version.
-
-        >>> Version("2.0.0").epoch
-        0
-        >>> Version("1!2.0.0").epoch
-        1
-        """
-        return self._version.epoch
-
-    @property
-    def release(self) -> Tuple[int, ...]:
-        """The components of the "release" segment of the version.
-
-        >>> Version("1.2.3").release
-        (1, 2, 3)
-        >>> Version("2.0.0").release
-        (2, 0, 0)
-        >>> Version("1!2.0.0.post0").release
-        (2, 0, 0)
-
-        Includes trailing zeroes but not the epoch or any pre-release / development /
-        post-release suffixes.
-        """
-        return self._version.release
-
-    @property
-    def pre(self) -> Optional[Tuple[str, int]]:
-        """The pre-release segment of the version.
-
-        >>> print(Version("1.2.3").pre)
-        None
-        >>> Version("1.2.3a1").pre
-        ('a', 1)
-        >>> Version("1.2.3b1").pre
-        ('b', 1)
-        >>> Version("1.2.3rc1").pre
-        ('rc', 1)
-        """
-        return self._version.pre
-
-    @property
-    def post(self) -> Optional[int]:
-        """The post-release number of the version.
-
-        >>> print(Version("1.2.3").post)
-        None
-        >>> Version("1.2.3.post1").post
-        1
-        """
-        return self._version.post[1] if self._version.post else None
-
-    @property
-    def dev(self) -> Optional[int]:
-        """The development number of the version.
-
-        >>> print(Version("1.2.3").dev)
-        None
-        >>> Version("1.2.3.dev1").dev
-        1
-        """
-        return self._version.dev[1] if self._version.dev else None
-
-    @property
-    def local(self) -> Optional[str]:
-        """The local version segment of the version.
-
-        >>> print(Version("1.2.3").local)
-        None
-        >>> Version("1.2.3+abc").local
-        'abc'
-        """
-        if self._version.local:
-            return ".".join(str(x) for x in self._version.local)
-        else:
-            return None
-
-    @property
-    def public(self) -> str:
-        """The public portion of the version.
-
-        >>> Version("1.2.3").public
-        '1.2.3'
-        >>> Version("1.2.3+abc").public
-        '1.2.3'
-        >>> Version("1.2.3+abc.dev1").public
-        '1.2.3'
-        """
-        return str(self).split("+", 1)[0]
-
-    @property
-    def base_version(self) -> str:
-        """The "base version" of the version.
-
-        >>> Version("1.2.3").base_version
-        '1.2.3'
-        >>> Version("1.2.3+abc").base_version
-        '1.2.3'
-        >>> Version("1!1.2.3+abc.dev1").base_version
-        '1!1.2.3'
-
-        The "base version" is the public version of the project without any pre or post
-        release markers.
-        """
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append(f"{self.epoch}!")
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        return "".join(parts)
-
-    @property
-    def is_prerelease(self) -> bool:
-        """Whether this version is a pre-release.
-
-        >>> Version("1.2.3").is_prerelease
-        False
-        >>> Version("1.2.3a1").is_prerelease
-        True
-        >>> Version("1.2.3b1").is_prerelease
-        True
-        >>> Version("1.2.3rc1").is_prerelease
-        True
-        >>> Version("1.2.3dev1").is_prerelease
-        True
-        """
-        return self.dev is not None or self.pre is not None
-
-    @property
-    def is_postrelease(self) -> bool:
-        """Whether this version is a post-release.
-
-        >>> Version("1.2.3").is_postrelease
-        False
-        >>> Version("1.2.3.post1").is_postrelease
-        True
-        """
-        return self.post is not None
-
-    @property
-    def is_devrelease(self) -> bool:
-        """Whether this version is a development release.
-
-        >>> Version("1.2.3").is_devrelease
-        False
-        >>> Version("1.2.3.dev1").is_devrelease
-        True
-        """
-        return self.dev is not None
-
-    @property
-    def major(self) -> int:
-        """The first item of :attr:`release` or ``0`` if unavailable.
-
-        >>> Version("1.2.3").major
-        1
-        """
-        return self.release[0] if len(self.release) >= 1 else 0
-
-    @property
-    def minor(self) -> int:
-        """The second item of :attr:`release` or ``0`` if unavailable.
-
-        >>> Version("1.2.3").minor
-        2
-        >>> Version("1").minor
-        0
-        """
-        return self.release[1] if len(self.release) >= 2 else 0
-
-    @property
-    def micro(self) -> int:
-        """The third item of :attr:`release` or ``0`` if unavailable.
-
-        >>> Version("1.2.3").micro
-        3
-        >>> Version("1").micro
-        0
-        """
-        return self.release[2] if len(self.release) >= 3 else 0
-
-
-def _parse_letter_version(
-    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
-) -> Optional[Tuple[str, int]]:
-
-    if letter:
-        # We consider there to be an implicit 0 in a pre-release if there is
-        # not a numeral associated with it.
-        if number is None:
-            number = 0
-
-        # We normalize any letters to their lower case form
-        letter = letter.lower()
-
-        # We consider some words to be alternate spellings of other words and
-        # in those cases we want to normalize the spellings to our preferred
-        # spelling.
-        if letter == "alpha":
-            letter = "a"
-        elif letter == "beta":
-            letter = "b"
-        elif letter in ["c", "pre", "preview"]:
-            letter = "rc"
-        elif letter in ["rev", "r"]:
-            letter = "post"
-
-        return letter, int(number)
-    if not letter and number:
-        # We assume if we are given a number, but we are not given a letter
-        # then this is using the implicit post release syntax (e.g. 1.0-1)
-        letter = "post"
-
-        return letter, int(number)
-
-    return None
-
-
-_local_version_separators = re.compile(r"[\._-]")
-
-
-def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
-    """
-    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
-    """
-    if local is not None:
-        return tuple(
-            part.lower() if not part.isdigit() else int(part)
-            for part in _local_version_separators.split(local)
-        )
-    return None
-
-
-def _cmpkey(
-    epoch: int,
-    release: Tuple[int, ...],
-    pre: Optional[Tuple[str, int]],
-    post: Optional[Tuple[str, int]],
-    dev: Optional[Tuple[str, int]],
-    local: Optional[LocalType],
-) -> CmpKey:
-
-    # When we compare a release version, we want to compare it with all of the
-    # trailing zeros removed. So we'll use a reverse the list, drop all the now
-    # leading zeros until we come to something non zero, then take the rest
-    # re-reverse it back into the correct order and make it a tuple and use
-    # that for our sorting key.
-    _release = tuple(
-        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
-    )
-
-    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
-    # We'll do this by abusing the pre segment, but we _only_ want to do this
-    # if there is not a pre or a post segment. If we have one of those then
-    # the normal sorting rules will handle this case correctly.
-    if pre is None and post is None and dev is not None:
-        _pre: CmpPrePostDevType = NegativeInfinity
-    # Versions without a pre-release (except as noted above) should sort after
-    # those with one.
-    elif pre is None:
-        _pre = Infinity
-    else:
-        _pre = pre
-
-    # Versions without a post segment should sort before those with one.
-    if post is None:
-        _post: CmpPrePostDevType = NegativeInfinity
-
-    else:
-        _post = post
-
-    # Versions without a development segment should sort after those with one.
-    if dev is None:
-        _dev: CmpPrePostDevType = Infinity
-
-    else:
-        _dev = dev
-
-    if local is None:
-        # Versions without a local segment should sort before those with one.
-        _local: CmpLocalType = NegativeInfinity
-    else:
-        # Versions with a local segment need that segment parsed to implement
-        # the sorting rules in PEP440.
-        # - Alpha numeric segments sort before numeric segments
-        # - Alpha numeric segments sort lexicographically
-        # - Numeric segments sort numerically
-        # - Shorter versions sort before longer versions when the prefixes
-        #   match exactly
-        _local = tuple(
-            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
-        )
-
-    return epoch, _release, _pre, _post, _dev, _local
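The vendored copy of ``packaging`` deleted above is replaced by the top-level ``packaging`` dependency (see the ``from packaging.utils import ...`` change in ``dist.py`` further down). A minimal sketch of the parsing and PEP 440 ordering semantics this module provided, exercised through the external package:

```python
# Minimal sketch, assuming the external `packaging` distribution is
# installed; it provides the same Version semantics as the deleted
# vendored copy.
from packaging.version import InvalidVersion, Version

v = Version("1!2.0.post1.dev3+ubuntu.1")
assert v.epoch == 1
assert v.release == (2, 0)
assert v.post == 1 and v.dev == 3
assert v.local == "ubuntu.1"
assert v.public == "1!2.0.post1.dev3"
assert v.base_version == "1!2.0"

# PEP 440 ordering: dev releases sort before pre-releases, which sort
# before the final release, which sorts before post-releases.
assert Version("1.0.dev0") < Version("1.0a1") < Version("1.0") < Version("1.0.post1")

try:
    Version("not a version")
except InvalidVersion:
    pass
```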
diff --git a/setuptools/_distutils/_vendor/ruff.toml b/setuptools/_distutils/_vendor/ruff.toml
deleted file mode 100644
index 00fee625a5..0000000000
--- a/setuptools/_distutils/_vendor/ruff.toml
+++ /dev/null
@@ -1 +0,0 @@
-exclude = ["*"]
diff --git a/setuptools/_distutils/archive_util.py b/setuptools/_distutils/archive_util.py
index 07cd97f4d0..cc4699b1a3 100644
--- a/setuptools/_distutils/archive_util.py
+++ b/setuptools/_distutils/archive_util.py
@@ -266,8 +266,7 @@ def make_archive(
         raise ValueError(f"unknown archive format '{format}'")
 
     func = format_info[0]
-    for arg, val in format_info[1]:
-        kwargs[arg] = val
+    kwargs.update(format_info[1])
 
     if format != 'zip':
         kwargs['owner'] = owner
diff --git a/setuptools/_distutils/command/build_ext.py b/setuptools/_distutils/command/build_ext.py
index cf475fe824..a7e3038be6 100644
--- a/setuptools/_distutils/command/build_ext.py
+++ b/setuptools/_distutils/command/build_ext.py
@@ -638,8 +638,7 @@ def swig_sources(self, sources, extension):
 
         # Do not override commandline arguments
         if not self.swig_opts:
-            for o in extension.swig_opts:
-                swig_cmd.append(o)
+            swig_cmd.extend(extension.swig_opts)
 
         for source in swig_sources:
             target = swig_targets[source]
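Same idiom in ``build_ext.swig_sources``: an append-in-a-loop becomes ``list.extend``, with identical behavior:

```python
# list.extend appends every element of the iterable, matching the
# deleted per-element append loop.
swig_cmd = ['swig', '-python']
swig_opts = ['-c++', '-modern']

expected = swig_cmd + swig_opts
swig_cmd.extend(swig_opts)
assert swig_cmd == expected
```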
diff --git a/setuptools/_distutils/command/install.py b/setuptools/_distutils/command/install.py
index 1fc09eef89..b83e061e02 100644
--- a/setuptools/_distutils/command/install.py
+++ b/setuptools/_distutils/command/install.py
@@ -680,7 +680,7 @@ def create_home_path(self):
         if not self.user:
             return
         home = convert_path(os.path.expanduser("~"))
-        for _name, path in self.config_vars.items():
+        for path in self.config_vars.values():
             if str(path).startswith(home) and not os.path.isdir(path):
                 self.debug_print(f"os.makedirs('{path}', 0o700)")
                 os.makedirs(path, 0o700)
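In ``install.create_home_path`` only the dictionary values are consumed, so iterating ``config_vars.values()`` drops the unused key binding without changing the result:

```python
# .values() yields the same paths the old .items() loop saw, minus the
# discarded _name binding.
config_vars = {'userbase': '/home/u/.local', 'usersite': '/home/u/.local/lib'}
assert list(config_vars.values()) == [path for _name, path in config_vars.items()]
```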
diff --git a/setuptools/_distutils/command/install_lib.py b/setuptools/_distutils/command/install_lib.py
index 01579d46b4..4c1230a286 100644
--- a/setuptools/_distutils/command/install_lib.py
+++ b/setuptools/_distutils/command/install_lib.py
@@ -81,9 +81,9 @@ def finalize_options(self):
         if not isinstance(self.optimize, int):
             try:
                 self.optimize = int(self.optimize)
-                if self.optimize not in (0, 1, 2):
-                    raise AssertionError
-            except (ValueError, AssertionError):
+            except ValueError:
+                pass
+            if self.optimize not in (0, 1, 2):
                 raise DistutilsOptionError("optimize must be 0, 1, or 2")
 
     def run(self):
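The restructured ``install_lib.finalize_options`` validation drops the ``AssertionError`` round-trip: a failed ``int()`` conversion is simply ignored, so both unparseable strings and out-of-range integers fall through to the single range check. A sketch of the control flow (``validate_optimize`` is an illustrative name, not part of the diff, and ``ValueError`` stands in for ``DistutilsOptionError`` to keep it dependency-free):

```python
def validate_optimize(value):
    if not isinstance(value, int):
        try:
            value = int(value)
        except ValueError:
            pass  # leave the bad value in place; the range check rejects it
    if value not in (0, 1, 2):
        raise ValueError("optimize must be 0, 1, or 2")
    return value

assert validate_optimize("2") == 2
for bad in ("fast", "3", 7):
    try:
        validate_optimize(bad)
    except ValueError:
        pass
    else:
        raise AssertionError(f"{bad!r} should have been rejected")
```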
diff --git a/setuptools/_distutils/command/sdist.py b/setuptools/_distutils/command/sdist.py
index e8abb73920..eda6afe811 100644
--- a/setuptools/_distutils/command/sdist.py
+++ b/setuptools/_distutils/command/sdist.py
@@ -391,7 +391,7 @@ def prune_file_list(self):
         build = self.get_finalized_command('build')
         base_dir = self.distribution.get_fullname()
 
-        self.filelist.exclude_pattern(None, prefix=build.build_base)
+        self.filelist.exclude_pattern(None, prefix=os.fspath(build.build_base))
         self.filelist.exclude_pattern(None, prefix=base_dir)
 
         if sys.platform == 'win32':
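This is the #4615 fix from the changelog: ``build.build_base`` may now be a ``pathlib.Path``, while the exclusion logic does string-prefix work, so ``os.fspath`` normalizes either form to ``str``:

```python
# os.fspath returns str for both a str and a PathLike build_base, which
# is what the prefix-based exclusion expects.
import os
import pathlib

for build_base in ('build', pathlib.Path('build')):
    prefix = os.fspath(build_base)
    assert isinstance(prefix, str)
    assert 'build/lib/pkg/mod.py'.startswith(prefix)
```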
diff --git a/setuptools/_distutils/command/wininst-10.0-amd64.exe b/setuptools/_distutils/command/wininst-10.0-amd64.exe
deleted file mode 100644
index 6fa0dce163..0000000000
Binary files a/setuptools/_distutils/command/wininst-10.0-amd64.exe and /dev/null differ
diff --git a/setuptools/_distutils/command/wininst-10.0.exe b/setuptools/_distutils/command/wininst-10.0.exe
deleted file mode 100644
index afc3bc6c14..0000000000
Binary files a/setuptools/_distutils/command/wininst-10.0.exe and /dev/null differ
diff --git a/setuptools/_distutils/command/wininst-14.0-amd64.exe b/setuptools/_distutils/command/wininst-14.0-amd64.exe
deleted file mode 100644
index 253c2e2ecc..0000000000
Binary files a/setuptools/_distutils/command/wininst-14.0-amd64.exe and /dev/null differ
diff --git a/setuptools/_distutils/command/wininst-14.0.exe b/setuptools/_distutils/command/wininst-14.0.exe
deleted file mode 100644
index 46f5f35667..0000000000
Binary files a/setuptools/_distutils/command/wininst-14.0.exe and /dev/null differ
diff --git a/setuptools/_distutils/command/wininst-6.0.exe b/setuptools/_distutils/command/wininst-6.0.exe
deleted file mode 100644
index f57c855a61..0000000000
Binary files a/setuptools/_distutils/command/wininst-6.0.exe and /dev/null differ
diff --git a/setuptools/_distutils/command/wininst-7.1.exe b/setuptools/_distutils/command/wininst-7.1.exe
deleted file mode 100644
index 1433bc1ad3..0000000000
Binary files a/setuptools/_distutils/command/wininst-7.1.exe and /dev/null differ
diff --git a/setuptools/_distutils/command/wininst-8.0.exe b/setuptools/_distutils/command/wininst-8.0.exe
deleted file mode 100644
index 7403bfabf5..0000000000
Binary files a/setuptools/_distutils/command/wininst-8.0.exe and /dev/null differ
diff --git a/setuptools/_distutils/command/wininst-9.0-amd64.exe b/setuptools/_distutils/command/wininst-9.0-amd64.exe
deleted file mode 100644
index 94fbd4341b..0000000000
Binary files a/setuptools/_distutils/command/wininst-9.0-amd64.exe and /dev/null differ
diff --git a/setuptools/_distutils/command/wininst-9.0.exe b/setuptools/_distutils/command/wininst-9.0.exe
deleted file mode 100644
index 2ec261f9fd..0000000000
Binary files a/setuptools/_distutils/command/wininst-9.0.exe and /dev/null differ
diff --git a/setuptools/_distutils/cygwinccompiler.py b/setuptools/_distutils/cygwinccompiler.py
index ce412e8329..18b1b3557b 100644
--- a/setuptools/_distutils/cygwinccompiler.py
+++ b/setuptools/_distutils/cygwinccompiler.py
@@ -172,8 +172,7 @@ def link(
 
             # Generate .def file
             contents = [f"LIBRARY {os.path.basename(output_filename)}", "EXPORTS"]
-            for sym in export_symbols:
-                contents.append(sym)
+            contents.extend(export_symbols)
             self.execute(write_file, (def_file, contents), f"writing {def_file}")
 
             # next add options for def-file
@@ -309,6 +308,9 @@ def check_config_h():
     fn = sysconfig.get_config_h_filename()
     try:
         config_h = pathlib.Path(fn).read_text(encoding='utf-8')
+    except OSError as exc:
+        return (CONFIG_H_UNCERTAIN, f"couldn't read '{fn}': {exc.strerror}")
+    else:
         substring = '__GNUC__'
         if substring in config_h:
             code = CONFIG_H_OK
@@ -317,8 +319,6 @@ def check_config_h():
             code = CONFIG_H_NOTOK
             mention_inflected = 'does not mention'
         return code, f"{fn!r} {mention_inflected} {substring!r}"
-    except OSError as exc:
-        return (CONFIG_H_UNCERTAIN, f"couldn't read '{fn}': {exc.strerror}")
 
 
 def is_cygwincc(cc):
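The ``check_config_h`` rewrite moves the success path into a ``try``/``except``/``else`` shape, so only the file read is guarded and an unexpected error in the substring logic can no longer be misreported as an unreadable file. A standalone sketch of the idiom:

```python
# Minimal sketch of the try/except/else shape: only the fallible read
# sits in the try block; success-only logic lives in else.
import pathlib

def classify(path):
    try:
        text = pathlib.Path(path).read_text(encoding='utf-8')
    except OSError as exc:
        return ('uncertain', f"couldn't read {path!r}: {exc.strerror}")
    else:
        found = '__GNUC__' in text
        return ('ok' if found else 'not ok', path)

print(classify('/nonexistent/config.h'))  # ('uncertain', ...)
```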
diff --git a/setuptools/_distutils/dist.py b/setuptools/_distutils/dist.py
index 115302b3e7..154301baff 100644
--- a/setuptools/_distutils/dist.py
+++ b/setuptools/_distutils/dist.py
@@ -10,15 +10,11 @@
 import pathlib
 import re
 import sys
+import warnings
 from collections.abc import Iterable
 from email import message_from_file
 
-from ._vendor.packaging.utils import canonicalize_name, canonicalize_version
-
-try:
-    import warnings
-except ImportError:
-    warnings = None
+from packaging.utils import canonicalize_name, canonicalize_version
 
 from ._log import log
 from .debug import DEBUG
@@ -249,10 +245,7 @@ def __init__(self, attrs=None):  # noqa: C901
                 attrs['license'] = attrs['licence']
                 del attrs['licence']
                 msg = "'licence' distribution option is deprecated; use 'license'"
-                if warnings is not None:
-                    warnings.warn(msg)
-                else:
-                    sys.stderr.write(msg + "\n")
+                warnings.warn(msg)
 
             # Now work on the rest of the attributes.  Any attribute that's
             # not already defined is invalid!
@@ -354,7 +347,8 @@ def _gen_paths(self):
         prefix = '.' * (os.name == 'posix')
         filename = prefix + 'pydistutils.cfg'
         if self.want_user_cfg:
-            yield pathlib.Path('~').expanduser() / filename
+            with contextlib.suppress(RuntimeError):
+                yield pathlib.Path('~').expanduser() / filename
 
         # All platforms support local setup.cfg
         yield pathlib.Path('setup.cfg')
@@ -741,9 +735,7 @@ def print_commands(self):
         import distutils.command
 
         std_commands = distutils.command.__all__
-        is_std = set()
-        for cmd in std_commands:
-            is_std.add(cmd)
+        is_std = set(std_commands)
 
         extra_commands = [cmd for cmd in self.cmdclass.keys() if cmd not in is_std]
 
@@ -769,9 +761,7 @@ def get_command_list(self):
         import distutils.command
 
         std_commands = distutils.command.__all__
-        is_std = set()
-        for cmd in std_commands:
-            is_std.add(cmd)
+        is_std = set(std_commands)
 
         extra_commands = [cmd for cmd in self.cmdclass.keys() if cmd not in is_std]
 
diff --git a/setuptools/_distutils/extension.py b/setuptools/_distutils/extension.py
index b302082f7a..33159079c1 100644
--- a/setuptools/_distutils/extension.py
+++ b/setuptools/_distutils/extension.py
@@ -105,7 +105,7 @@ def __init__(
         **kw,  # To catch unknown keywords
     ):
         if not isinstance(name, str):
-            raise AssertionError("'name' must be a string")
+            raise AssertionError("'name' must be a string")  # noqa: TRY004
         if not (
             isinstance(sources, list)
             and all(isinstance(v, (str, os.PathLike)) for v in sources)
diff --git a/setuptools/_distutils/file_util.py b/setuptools/_distutils/file_util.py
index b19a5dcfa4..85ee4dafcb 100644
--- a/setuptools/_distutils/file_util.py
+++ b/setuptools/_distutils/file_util.py
@@ -140,12 +140,13 @@ def copy_file(  # noqa: C901
         if not (os.path.exists(dst) and os.path.samefile(src, dst)):
             try:
                 os.link(src, dst)
-                return (dst, 1)
             except OSError:
                 # If hard linking fails, fall back on copying file
                 # (some special filesystems don't support hard linking
                 #  even under Unix, see issue #8876).
                 pass
+            else:
+                return (dst, 1)
     elif link == 'sym':
         if not (os.path.exists(dst) and os.path.samefile(src, dst)):
             os.symlink(src, dst)
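In ``copy_file`` the hard-link early return moves from ``try`` to ``else``, making explicit that only an ``os.link`` failure triggers the copy fallback. A runnable sketch of the same shape (``link_or_copy`` is illustrative, not the actual helper):

```python
# Sketch: hard-link when possible, fall back to copying when the
# filesystem refuses; the success return lives in `else`.
import os
import shutil
import tempfile

def link_or_copy(src, dst):
    try:
        os.link(src, dst)
    except OSError:
        shutil.copyfile(src, dst)  # e.g. filesystem without hard links
        return (dst, 0)
    else:
        return (dst, 1)

with tempfile.TemporaryDirectory() as tmp:
    src = os.path.join(tmp, 'a.txt')
    open(src, 'w').close()
    print(link_or_copy(src, os.path.join(tmp, 'b.txt')))
```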
diff --git a/setuptools/_distutils/msvc9compiler.py b/setuptools/_distutils/msvc9compiler.py
deleted file mode 100644
index 4c70848730..0000000000
--- a/setuptools/_distutils/msvc9compiler.py
+++ /dev/null
@@ -1,822 +0,0 @@
-"""distutils.msvc9compiler
-
-Contains MSVCCompiler, an implementation of the abstract CCompiler class
-for the Microsoft Visual Studio 2008.
-
-The module is compatible with VS 2005 and VS 2008. You can find legacy support
-for older versions of VS in distutils.msvccompiler.
-"""
-
-# Written by Perry Stoll
-# hacked by Robin Becker and Thomas Heller to do a better job of
-#   finding DevStudio (through the registry)
-# ported to VS2005 and VS 2008 by Christian Heimes
-
-import os
-import re
-import subprocess
-import sys
-import warnings
-import winreg
-
-from ._log import log
-from .ccompiler import CCompiler, gen_lib_options
-from .errors import (
-    CompileError,
-    DistutilsExecError,
-    DistutilsPlatformError,
-    LibError,
-    LinkError,
-)
-from .util import get_platform
-
-warnings.warn(
-    "msvc9compiler is deprecated and slated to be removed "
-    "in the future. Please discontinue use or file an issue "
-    "with pypa/distutils describing your use case.",
-    DeprecationWarning,
-)
-
-RegOpenKeyEx = winreg.OpenKeyEx
-RegEnumKey = winreg.EnumKey
-RegEnumValue = winreg.EnumValue
-RegError = winreg.error
-
-HKEYS = (
-    winreg.HKEY_USERS,
-    winreg.HKEY_CURRENT_USER,
-    winreg.HKEY_LOCAL_MACHINE,
-    winreg.HKEY_CLASSES_ROOT,
-)
-
-NATIVE_WIN64 = sys.platform == 'win32' and sys.maxsize > 2**32
-if NATIVE_WIN64:
-    # Visual C++ is a 32-bit application, so we need to look in
-    # the corresponding registry branch, if we're running a
-    # 64-bit Python on Win64
-    VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f"
-    WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows"
-    NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework"
-else:
-    VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
-    WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
-    NET_BASE = r"Software\Microsoft\.NETFramework"
-
-# A map keyed by get_platform() return values to values accepted by
-# 'vcvarsall.bat'.  Note a cross-compile may combine these (eg, 'x86_amd64' is
-# the param to cross-compile on x86 targeting amd64.)
-PLAT_TO_VCVARS = {
-    'win32': 'x86',
-    'win-amd64': 'amd64',
-}
-
-
-class Reg:
-    """Helper class to read values from the registry"""
-
-    def get_value(cls, path, key):
-        for base in HKEYS:
-            d = cls.read_values(base, path)
-            if d and key in d:
-                return d[key]
-        raise KeyError(key)
-
-    get_value = classmethod(get_value)
-
-    def read_keys(cls, base, key):
-        """Return list of registry keys."""
-        try:
-            handle = RegOpenKeyEx(base, key)
-        except RegError:
-            return None
-        L = []
-        i = 0
-        while True:
-            try:
-                k = RegEnumKey(handle, i)
-            except RegError:
-                break
-            L.append(k)
-            i += 1
-        return L
-
-    read_keys = classmethod(read_keys)
-
-    def read_values(cls, base, key):
-        """Return dict of registry keys and values.
-
-        All names are converted to lowercase.
-        """
-        try:
-            handle = RegOpenKeyEx(base, key)
-        except RegError:
-            return None
-        d = {}
-        i = 0
-        while True:
-            try:
-                name, value, type = RegEnumValue(handle, i)
-            except RegError:
-                break
-            name = name.lower()
-            d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
-            i += 1
-        return d
-
-    read_values = classmethod(read_values)
-
-    def convert_mbcs(s):
-        dec = getattr(s, "decode", None)
-        if dec is not None:
-            try:
-                s = dec("mbcs")
-            except UnicodeError:
-                pass
-        return s
-
-    convert_mbcs = staticmethod(convert_mbcs)
-
-
-class MacroExpander:
-    def __init__(self, version):
-        self.macros = {}
-        self.vsbase = VS_BASE % version
-        self.load_macros(version)
-
-    def set_macro(self, macro, path, key):
-        self.macros[f"$({macro})"] = Reg.get_value(path, key)
-
-    def load_macros(self, version):
-        self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
-        self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
-        self.set_macro("FrameworkDir", NET_BASE, "installroot")
-        try:
-            if version >= 8.0:
-                self.set_macro("FrameworkSDKDir", NET_BASE, "sdkinstallrootv2.0")
-            else:
-                raise KeyError("sdkinstallrootv2.0")
-        except KeyError:
-            raise DistutilsPlatformError(
-                """Python was built with Visual Studio 2008;
-extensions must be built with a compiler that can generate compatible binaries.
-Visual Studio 2008 was not found on this system. If you have Cygwin installed,
-you can try compiling with MingW32, by passing "-c mingw32" to setup.py."""
-            )
-
-        if version >= 9.0:
-            self.set_macro("FrameworkVersion", self.vsbase, "clr version")
-            self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
-        else:
-            p = r"Software\Microsoft\NET Framework Setup\Product"
-            for base in HKEYS:
-                try:
-                    h = RegOpenKeyEx(base, p)
-                except RegError:
-                    continue
-                key = RegEnumKey(h, 0)
-                d = Reg.get_value(base, rf"{p}\{key}")
-                self.macros["$(FrameworkVersion)"] = d["version"]
-
-    def sub(self, s):
-        for k, v in self.macros.items():
-            s = s.replace(k, v)
-        return s
-
-
-def get_build_version():
-    """Return the version of MSVC that was used to build Python.
-
-    For Python 2.3 and up, the version number is included in
-    sys.version.  For earlier versions, assume the compiler is MSVC 6.
-    """
-    prefix = "MSC v."
-    i = sys.version.find(prefix)
-    if i == -1:
-        return 6
-    i = i + len(prefix)
-    s, rest = sys.version[i:].split(" ", 1)
-    majorVersion = int(s[:-2]) - 6
-    if majorVersion >= 13:
-        # v13 was skipped and should be v14
-        majorVersion += 1
-    minorVersion = int(s[2:3]) / 10.0
-    # I don't think paths are affected by minor version in version 6
-    if majorVersion == 6:
-        minorVersion = 0
-    if majorVersion >= 6:
-        return majorVersion + minorVersion
-    # else we don't know what version of the compiler this is
-    return None
-
-
-def normalize_and_reduce_paths(paths):
-    """Return a list of normalized paths with duplicates removed.
-
-    The current order of paths is maintained.
-    """
-    # Paths are normalized so things like:  /a and /a/ aren't both preserved.
-    reduced_paths = []
-    for p in paths:
-        np = os.path.normpath(p)
-        # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
-        if np not in reduced_paths:
-            reduced_paths.append(np)
-    return reduced_paths
-
-
-def removeDuplicates(variable):
-    """Remove duplicate values of an environment variable."""
-    oldList = variable.split(os.pathsep)
-    newList = []
-    for i in oldList:
-        if i not in newList:
-            newList.append(i)
-    newVariable = os.pathsep.join(newList)
-    return newVariable
-
-
-def find_vcvarsall(version):
-    """Find the vcvarsall.bat file
-
-    At first it tries to find the productdir of VS 2008 in the registry. If
-    that fails it falls back to the VS90COMNTOOLS env var.
-    """
-    vsbase = VS_BASE % version
-    try:
-        productdir = Reg.get_value(rf"{vsbase}\Setup\VC", "productdir")
-    except KeyError:
-        log.debug("Unable to find productdir in registry")
-        productdir = None
-
-    if not productdir or not os.path.isdir(productdir):
-        toolskey = f"VS{version:0.0f}0COMNTOOLS"
-        toolsdir = os.environ.get(toolskey, None)
-
-        if toolsdir and os.path.isdir(toolsdir):
-            productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
-            productdir = os.path.abspath(productdir)
-            if not os.path.isdir(productdir):
-                log.debug(f"{productdir} is not a valid directory")
-                return None
-        else:
-            log.debug(f"Env var {toolskey} is not set or invalid")
-    if not productdir:
-        log.debug("No productdir found")
-        return None
-    vcvarsall = os.path.join(productdir, "vcvarsall.bat")
-    if os.path.isfile(vcvarsall):
-        return vcvarsall
-    log.debug("Unable to find vcvarsall.bat")
-    return None
-
-
-def query_vcvarsall(version, arch="x86"):
-    """Launch vcvarsall.bat and read the settings from its environment"""
-    vcvarsall = find_vcvarsall(version)
-    interesting = {"include", "lib", "libpath", "path"}
-    result = {}
-
-    if vcvarsall is None:
-        raise DistutilsPlatformError("Unable to find vcvarsall.bat")
-    log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version)
-    popen = subprocess.Popen(
-        f'"{vcvarsall}" {arch} & set',
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-    )
-    try:
-        stdout, stderr = popen.communicate()
-        if popen.wait() != 0:
-            raise DistutilsPlatformError(stderr.decode("mbcs"))
-
-        stdout = stdout.decode("mbcs")
-        for line in stdout.split("\n"):
-            line = Reg.convert_mbcs(line)
-            if '=' not in line:
-                continue
-            line = line.strip()
-            key, value = line.split('=', 1)
-            key = key.lower()
-            if key in interesting:
-                if value.endswith(os.pathsep):
-                    value = value[:-1]
-                result[key] = removeDuplicates(value)
-
-    finally:
-        popen.stdout.close()
-        popen.stderr.close()
-
-    if len(result) != len(interesting):
-        raise ValueError(str(list(result.keys())))
-
-    return result
-
-
-# More globals
-VERSION = get_build_version()
-# MACROS = MacroExpander(VERSION)
-
-
-class MSVCCompiler(CCompiler):
-    """Concrete class that implements an interface to Microsoft Visual C++,
-    as defined by the CCompiler abstract class."""
-
-    compiler_type = 'msvc'
-
-    # Just set this so CCompiler's constructor doesn't barf.  We currently
-    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
-    # as it really isn't necessary for this sort of single-compiler class.
-    # Would be nice to have a consistent interface with UnixCCompiler,
-    # though, so it's worth thinking about.
-    executables = {}
-
-    # Private class data (need to distinguish C from C++ source for compiler)
-    _c_extensions = ['.c']
-    _cpp_extensions = ['.cc', '.cpp', '.cxx']
-    _rc_extensions = ['.rc']
-    _mc_extensions = ['.mc']
-
-    # Needed for the filename generation methods provided by the
-    # base class, CCompiler.
-    src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions
-    res_extension = '.res'
-    obj_extension = '.obj'
-    static_lib_extension = '.lib'
-    shared_lib_extension = '.dll'
-    static_lib_format = shared_lib_format = '%s%s'
-    exe_extension = '.exe'
-
-    def __init__(self, verbose=False, dry_run=False, force=False):
-        super().__init__(verbose, dry_run, force)
-        self.__version = VERSION
-        self.__root = r"Software\Microsoft\VisualStudio"
-        # self.__macros = MACROS
-        self.__paths = []
-        # target platform (.plat_name is consistent with 'bdist')
-        self.plat_name = None
-        self.__arch = None  # deprecated name
-        self.initialized = False
-
-    def initialize(self, plat_name=None):  # noqa: C901
-        # multi-init means we would need to check platform same each time...
-        assert not self.initialized, "don't init multiple times"
-        if self.__version < 8.0:
-            raise DistutilsPlatformError(
-                f"VC {self.__version:0.1f} is not supported by this module"
-            )
-        if plat_name is None:
-            plat_name = get_platform()
-        # sanity check for platforms to prevent obscure errors later.
-        ok_plats = 'win32', 'win-amd64'
-        if plat_name not in ok_plats:
-            raise DistutilsPlatformError(f"--plat-name must be one of {ok_plats}")
-
-        if (
-            "DISTUTILS_USE_SDK" in os.environ
-            and "MSSdk" in os.environ
-            and self.find_exe("cl.exe")
-        ):
-            # Assume that the SDK set up everything alright; don't try to be
-            # smarter
-            self.cc = "cl.exe"
-            self.linker = "link.exe"
-            self.lib = "lib.exe"
-            self.rc = "rc.exe"
-            self.mc = "mc.exe"
-        else:
-            # On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
-            # to cross compile, you use 'x86_amd64'.
-            # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
-            # compile use 'x86' (ie, it runs the x86 compiler directly)
-            if plat_name in (get_platform(), 'win32'):
-                # native build or cross-compile to win32
-                plat_spec = PLAT_TO_VCVARS[plat_name]
-            else:
-                # cross compile from win32 -> some 64bit
-                plat_spec = (
-                    PLAT_TO_VCVARS[get_platform()] + '_' + PLAT_TO_VCVARS[plat_name]
-                )
-
-            vc_env = query_vcvarsall(VERSION, plat_spec)
-
-            self.__paths = vc_env['path'].split(os.pathsep)
-            os.environ['lib'] = vc_env['lib']
-            os.environ['include'] = vc_env['include']
-
-            if len(self.__paths) == 0:
-                raise DistutilsPlatformError(
-                    f"Python was built with {self.__product}, "
-                    "and extensions need to be built with the same "
-                    "version of the compiler, but it isn't installed."
-                )
-
-            self.cc = self.find_exe("cl.exe")
-            self.linker = self.find_exe("link.exe")
-            self.lib = self.find_exe("lib.exe")
-            self.rc = self.find_exe("rc.exe")  # resource compiler
-            self.mc = self.find_exe("mc.exe")  # message compiler
-            # self.set_path_env_var('lib')
-            # self.set_path_env_var('include')
-
-        # extend the MSVC path with the current path
-        try:
-            for p in os.environ['path'].split(';'):
-                self.__paths.append(p)
-        except KeyError:
-            pass
-        self.__paths = normalize_and_reduce_paths(self.__paths)
-        os.environ['path'] = ";".join(self.__paths)
-
-        self.preprocess_options = None
-        if self.__arch == "x86":
-            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/DNDEBUG']
-            self.compile_options_debug = [
-                '/nologo',
-                '/Od',
-                '/MDd',
-                '/W3',
-                '/Z7',
-                '/D_DEBUG',
-            ]
-        else:
-            # Win64
-            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG']
-            self.compile_options_debug = [
-                '/nologo',
-                '/Od',
-                '/MDd',
-                '/W3',
-                '/GS-',
-                '/Z7',
-                '/D_DEBUG',
-            ]
-
-        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
-        if self.__version >= 7:
-            self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG']
-        self.ldflags_static = ['/nologo']
-
-        self.initialized = True
-
-    # -- Worker methods ------------------------------------------------
-
-    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
-        # Copied from ccompiler.py, extended to return .res as 'object'-file
-        # for .rc input file
-        if output_dir is None:
-            output_dir = ''
-        obj_names = []
-        for src_name in source_filenames:
-            (base, ext) = os.path.splitext(src_name)
-            base = os.path.splitdrive(base)[1]  # Chop off the drive
-            base = base[os.path.isabs(base) :]  # If abs, chop off leading /
-            if ext not in self.src_extensions:
-                # Better to raise an exception instead of silently continuing
-                # and later complain about sources and targets having
-                # different lengths
-                raise CompileError(f"Don't know how to compile {src_name}")
-            if strip_dir:
-                base = os.path.basename(base)
-            if ext in self._rc_extensions:
-                obj_names.append(os.path.join(output_dir, base + self.res_extension))
-            elif ext in self._mc_extensions:
-                obj_names.append(os.path.join(output_dir, base + self.res_extension))
-            else:
-                obj_names.append(os.path.join(output_dir, base + self.obj_extension))
-        return obj_names
-
-    def compile(  # noqa: C901
-        self,
-        sources,
-        output_dir=None,
-        macros=None,
-        include_dirs=None,
-        debug=False,
-        extra_preargs=None,
-        extra_postargs=None,
-        depends=None,
-    ):
-        if not self.initialized:
-            self.initialize()
-        compile_info = self._setup_compile(
-            output_dir, macros, include_dirs, sources, depends, extra_postargs
-        )
-        macros, objects, extra_postargs, pp_opts, build = compile_info
-
-        compile_opts = extra_preargs or []
-        compile_opts.append('/c')
-        if debug:
-            compile_opts.extend(self.compile_options_debug)
-        else:
-            compile_opts.extend(self.compile_options)
-
-        for obj in objects:
-            try:
-                src, ext = build[obj]
-            except KeyError:
-                continue
-            if debug:
-                # pass the full pathname to MSVC in debug mode,
-                # this allows the debugger to find the source file
-                # without asking the user to browse for it
-                src = os.path.abspath(src)
-
-            if ext in self._c_extensions:
-                input_opt = "/Tc" + src
-            elif ext in self._cpp_extensions:
-                input_opt = "/Tp" + src
-            elif ext in self._rc_extensions:
-                # compile .RC to .RES file
-                input_opt = src
-                output_opt = "/fo" + obj
-                try:
-                    self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt])
-                except DistutilsExecError as msg:
-                    raise CompileError(msg)
-                continue
-            elif ext in self._mc_extensions:
-                # Compile .MC to .RC file to .RES file.
-                #   * '-h dir' specifies the directory for the
-                #     generated include file
-                #   * '-r dir' specifies the target directory of the
-                #     generated RC file and the binary message resource
-                #     it includes
-                #
-                # For now (since there are no options to change this),
-                # we use the source-directory for the include file and
-                # the build directory for the RC file and message
-                # resources. This works at least for win32all.
-                h_dir = os.path.dirname(src)
-                rc_dir = os.path.dirname(obj)
-                try:
-                    # first compile .MC to .RC and .H file
-                    self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src])
-                    base, _ = os.path.splitext(os.path.basename(src))
-                    rc_file = os.path.join(rc_dir, base + '.rc')
-                    # then compile .RC to .RES file
-                    self.spawn([self.rc] + ["/fo" + obj] + [rc_file])
-
-                except DistutilsExecError as msg:
-                    raise CompileError(msg)
-                continue
-            else:
-                # how to handle this file?
-                raise CompileError(f"Don't know how to compile {src} to {obj}")
-
-            output_opt = "/Fo" + obj
-            try:
-                self.spawn(
-                    [self.cc]
-                    + compile_opts
-                    + pp_opts
-                    + [input_opt, output_opt]
-                    + extra_postargs
-                )
-            except DistutilsExecError as msg:
-                raise CompileError(msg)
-
-        return objects
-
-    def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
-    ):
-        if not self.initialized:
-            self.initialize()
-        (objects, output_dir) = self._fix_object_args(objects, output_dir)
-        output_filename = self.library_filename(output_libname, output_dir=output_dir)
-
-        if self._need_link(objects, output_filename):
-            lib_args = objects + ['/OUT:' + output_filename]
-            if debug:
-                pass  # XXX what goes here?
-            try:
-                self.spawn([self.lib] + lib_args)
-            except DistutilsExecError as msg:
-                raise LibError(msg)
-        else:
-            log.debug("skipping %s (up-to-date)", output_filename)
-
-    def link(  # noqa: C901
-        self,
-        target_desc,
-        objects,
-        output_filename,
-        output_dir=None,
-        libraries=None,
-        library_dirs=None,
-        runtime_library_dirs=None,
-        export_symbols=None,
-        debug=False,
-        extra_preargs=None,
-        extra_postargs=None,
-        build_temp=None,
-        target_lang=None,
-    ):
-        if not self.initialized:
-            self.initialize()
-        (objects, output_dir) = self._fix_object_args(objects, output_dir)
-        fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
-        (libraries, library_dirs, runtime_library_dirs) = fixed_args
-
-        if runtime_library_dirs:
-            self.warn(
-                "I don't know what to do with 'runtime_library_dirs': "
-                + str(runtime_library_dirs)
-            )
-
-        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries)
-        if output_dir is not None:
-            output_filename = os.path.join(output_dir, output_filename)
-
-        if self._need_link(objects, output_filename):
-            if target_desc == CCompiler.EXECUTABLE:
-                if debug:
-                    ldflags = self.ldflags_shared_debug[1:]
-                else:
-                    ldflags = self.ldflags_shared[1:]
-            else:
-                if debug:
-                    ldflags = self.ldflags_shared_debug
-                else:
-                    ldflags = self.ldflags_shared
-
-            export_opts = [f"/EXPORT:{sym}" for sym in export_symbols or []]
-
-            ld_args = (
-                ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename]
-            )
-
-            # The MSVC linker generates .lib and .exp files, which cannot be
-            # suppressed by any linker switches. The .lib files may even be
-            # needed! Make sure they are generated in the temporary build
-            # directory. Since they have different names for debug and release
-            # builds, they can go into the same directory.
-            build_temp = os.path.dirname(objects[0])
-            if export_symbols is not None:
-                (dll_name, dll_ext) = os.path.splitext(
-                    os.path.basename(output_filename)
-                )
-                implib_file = os.path.join(build_temp, self.library_filename(dll_name))
-                ld_args.append('/IMPLIB:' + implib_file)
-
-            self.manifest_setup_ldargs(output_filename, build_temp, ld_args)
-
-            if extra_preargs:
-                ld_args[:0] = extra_preargs
-            if extra_postargs:
-                ld_args.extend(extra_postargs)
-
-            self.mkpath(os.path.dirname(output_filename))
-            try:
-                self.spawn([self.linker] + ld_args)
-            except DistutilsExecError as msg:
-                raise LinkError(msg)
-
-            # embed the manifest
-            # XXX - this is somewhat fragile - if mt.exe fails, distutils
-            # will still consider the DLL up-to-date, but it will not have a
-            # manifest.  Maybe we should link to a temp file?  OTOH, that
-            # implies a build environment error that shouldn't go undetected.
-            mfinfo = self.manifest_get_embed_info(target_desc, ld_args)
-            if mfinfo is not None:
-                mffilename, mfid = mfinfo
-                out_arg = f'-outputresource:{output_filename};{mfid}'
-                try:
-                    self.spawn(['mt.exe', '-nologo', '-manifest', mffilename, out_arg])
-                except DistutilsExecError as msg:
-                    raise LinkError(msg)
-        else:
-            log.debug("skipping %s (up-to-date)", output_filename)
-
-    def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
-        # If we need a manifest at all, an embedded manifest is recommended.
-        # See MSDN article titled
-        # "Understanding manifest generation for C/C++ programs"
-        # (currently at https://learn.microsoft.com/en-us/cpp/build/understanding-manifest-generation-for-c-cpp-programs)
-        # Ask the linker to generate the manifest in the temp dir, so
-        # we can check it, and possibly embed it, later.
-        temp_manifest = os.path.join(
-            build_temp, os.path.basename(output_filename) + ".manifest"
-        )
-        ld_args.append('/MANIFESTFILE:' + temp_manifest)
-
-    def manifest_get_embed_info(self, target_desc, ld_args):
-        # If a manifest should be embedded, return a tuple of
-        # (manifest_filename, resource_id).  Returns None if no manifest
-        # should be embedded.  See https://bugs.python.org/issue7833 for why
-        # we want to avoid any manifest for extension modules if we can.
-        for arg in ld_args:
-            if arg.startswith("/MANIFESTFILE:"):
-                temp_manifest = arg.split(":", 1)[1]
-                break
-        else:
-            # no /MANIFESTFILE so nothing to do.
-            return None
-        if target_desc == CCompiler.EXECUTABLE:
-            # by default, executables always get the manifest with the
-            # CRT referenced.
-            mfid = 1
-        else:
-            # Extension modules try and avoid any manifest if possible.
-            mfid = 2
-            temp_manifest = self._remove_visual_c_ref(temp_manifest)
-        if temp_manifest is None:
-            return None
-        return temp_manifest, mfid
-
-    def _remove_visual_c_ref(self, manifest_file):
-        try:
-            # Remove references to the Visual C runtime, so they will
-            # fall through to the Visual C dependency of Python.exe.
-            # This way, when installed for a restricted user (e.g.
-            # runtimes are not in WinSxS folder, but in Python's own
-            # folder), the runtimes do not need to be in every folder
-            # with .pyd's.
-            # Returns either the filename of the modified manifest or
-            # None if no manifest should be embedded.
-            manifest_f = open(manifest_file)
-            try:
-                manifest_buf = manifest_f.read()
-            finally:
-                manifest_f.close()
-            pattern = re.compile(
-                r"""<assemblyIdentity.*?name=("|')Microsoft\.VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
-                re.DOTALL,
-            )
-            manifest_buf = re.sub(pattern, "", manifest_buf)
-            pattern = r"<dependentAssembly>\s*</dependentAssembly>"
-            manifest_buf = re.sub(pattern, "", manifest_buf)
-            # Now see if any other assemblies are referenced - if not, we
-            # don't want a manifest embedded.
-            pattern = re.compile(
-                r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')""",
-                re.DOTALL,
-            )
-            if re.search(pattern, manifest_buf) is None:
-                return None
-
-            manifest_f = open(manifest_file, 'w')
-            try:
-                manifest_f.write(manifest_buf)
-                return manifest_file
-            finally:
-                manifest_f.close()
-        except OSError:
-            pass
-
-    # -- Miscellaneous methods -----------------------------------------
-    # These are all used by the 'gen_lib_options()' function, in
-    # ccompiler.py.
-
-    def library_dir_option(self, dir):
-        return "/LIBPATH:" + dir
-
-    def runtime_library_dir_option(self, dir):
-        raise DistutilsPlatformError(
-            "don't know how to set runtime library search path for MSVC++"
-        )
-
-    def library_option(self, lib):
-        return self.library_filename(lib)
-
-    def find_library_file(self, dirs, lib, debug=False):
-        # Prefer a debugging library if found (and requested), but deal
-        # with it if we don't have one.
-        if debug:
-            try_names = [lib + "_d", lib]
-        else:
-            try_names = [lib]
-        for dir in dirs:
-            for name in try_names:
-                libfile = os.path.join(dir, self.library_filename(name))
-                if os.path.exists(libfile):
-                    return libfile
-        else:
-            # Oops, didn't find it in *any* of 'dirs'
-            return None
-
-    # Helper methods for using the MSVC registry settings
-
-    def find_exe(self, exe):
-        """Return path to an MSVC executable program.
-
-        Tries to find the program in several places: first, one of the
-        MSVC program search paths from the registry; next, the directories
-        in the PATH environment variable.  If any of those work, return an
-        absolute path that is known to exist.  If none of them work, just
-        return the original program name, 'exe'.
-        """
-        for p in self.__paths:
-            fn = os.path.join(os.path.abspath(p), exe)
-            if os.path.isfile(fn):
-                return fn
-
-        # didn't find it; try existing path
-        for p in os.environ['Path'].split(';'):
-            fn = os.path.join(os.path.abspath(p), exe)
-            if os.path.isfile(fn):
-                return fn
-
-        return exe
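With ``msvc9compiler`` (and ``msvccompiler`` below) removed, the deprecation warning's advice applies: callers should go through the generic compiler factory, which resolves ``'msvc'`` to the consolidated modern implementation. A hedged migration sketch, assuming a Windows host with an installed MSVC toolchain and a placeholder ``hello.c`` source:

```python
# Hedged migration sketch: new_compiler(compiler='msvc') returns the
# modern MSVCCompiler; initialize() locates cl.exe/link.exe for the
# current platform. Requires Windows with Visual Studio installed.
from distutils.ccompiler import new_compiler

compiler = new_compiler(compiler='msvc')
compiler.initialize()
objects = compiler.compile(['hello.c'])  # hello.c is a placeholder source
```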
diff --git a/setuptools/_distutils/msvccompiler.py b/setuptools/_distutils/msvccompiler.py
deleted file mode 100644
index 2a5e61d78d..0000000000
--- a/setuptools/_distutils/msvccompiler.py
+++ /dev/null
@@ -1,687 +0,0 @@
-"""distutils.msvccompiler
-
-Contains MSVCCompiler, an implementation of the abstract CCompiler class
-for the Microsoft Visual Studio.
-"""
-
-# Written by Perry Stoll
-# hacked by Robin Becker and Thomas Heller to do a better job of
-#   finding DevStudio (through the registry)
-
-import os
-import sys
-import warnings
-
-from ._log import log
-from .ccompiler import CCompiler, gen_lib_options
-from .errors import (
-    CompileError,
-    DistutilsExecError,
-    DistutilsPlatformError,
-    LibError,
-    LinkError,
-)
-
-_can_read_reg = False
-try:
-    import winreg
-
-    _can_read_reg = True
-    hkey_mod = winreg
-
-    RegOpenKeyEx = winreg.OpenKeyEx
-    RegEnumKey = winreg.EnumKey
-    RegEnumValue = winreg.EnumValue
-    RegError = winreg.error
-
-except ImportError:
-    try:
-        import win32api
-        import win32con
-
-        _can_read_reg = True
-        hkey_mod = win32con
-
-        RegOpenKeyEx = win32api.RegOpenKeyEx
-        RegEnumKey = win32api.RegEnumKey
-        RegEnumValue = win32api.RegEnumValue
-        RegError = win32api.error
-    except ImportError:
-        log.info(
-            "Warning: Can't read registry to find the "
-            "necessary compiler setting\n"
-            "Make sure that Python modules winreg, "
-            "win32api or win32con are installed."
-        )
-        pass
-
-if _can_read_reg:
-    HKEYS = (
-        hkey_mod.HKEY_USERS,
-        hkey_mod.HKEY_CURRENT_USER,
-        hkey_mod.HKEY_LOCAL_MACHINE,
-        hkey_mod.HKEY_CLASSES_ROOT,
-    )
-
-
-warnings.warn(
-    "msvccompiler is deprecated and slated to be removed "
-    "in the future. Please discontinue use or file an issue "
-    "with pypa/distutils describing your use case.",
-    DeprecationWarning,
-)
-
-
-def read_keys(base, key):
-    """Return list of registry keys."""
-    try:
-        handle = RegOpenKeyEx(base, key)
-    except RegError:
-        return None
-    L = []
-    i = 0
-    while True:
-        try:
-            k = RegEnumKey(handle, i)
-        except RegError:
-            break
-        L.append(k)
-        i += 1
-    return L
-
-
-def read_values(base, key):
-    """Return dict of registry keys and values.
-
-    All names are converted to lowercase.
-    """
-    try:
-        handle = RegOpenKeyEx(base, key)
-    except RegError:
-        return None
-    d = {}
-    i = 0
-    while True:
-        try:
-            name, value, type = RegEnumValue(handle, i)
-        except RegError:
-            break
-        name = name.lower()
-        d[convert_mbcs(name)] = convert_mbcs(value)
-        i += 1
-    return d
-
-
-def convert_mbcs(s):
-    dec = getattr(s, "decode", None)
-    if dec is not None:
-        try:
-            s = dec("mbcs")
-        except UnicodeError:
-            pass
-    return s
-
-
-class MacroExpander:
-    def __init__(self, version):
-        self.macros = {}
-        self.load_macros(version)
-
-    def set_macro(self, macro, path, key):
-        for base in HKEYS:
-            d = read_values(base, path)
-            if d:
-                self.macros[f"$({macro})"] = d[key]
-                break
-
-    def load_macros(self, version):
-        vsbase = rf"Software\Microsoft\VisualStudio\{version:0.1f}"
-        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
-        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
-        net = r"Software\Microsoft\.NETFramework"
-        self.set_macro("FrameworkDir", net, "installroot")
-        try:
-            if version > 7.0:
-                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
-            else:
-                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
-        except KeyError:
-            raise DistutilsPlatformError(
-                """Python was built with Visual Studio 2003;
-extensions must be built with a compiler that can generate compatible binaries.
-Visual Studio 2003 was not found on this system. If you have Cygwin installed,
-you can try compiling with MingW32, by passing "-c mingw32" to setup.py."""
-            )
-
-        p = r"Software\Microsoft\NET Framework Setup\Product"
-        for base in HKEYS:
-            try:
-                h = RegOpenKeyEx(base, p)
-            except RegError:
-                continue
-            key = RegEnumKey(h, 0)
-            d = read_values(base, rf"{p}\{key}")
-            self.macros["$(FrameworkVersion)"] = d["version"]
-
-    def sub(self, s):
-        for k, v in self.macros.items():
-            s = s.replace(k, v)
-        return s
-
-
-def get_build_version():
-    """Return the version of MSVC that was used to build Python.
-
-    For Python 2.3 and up, the version number is included in
-    sys.version.  For earlier versions, assume the compiler is MSVC 6.
-    """
-    prefix = "MSC v."
-    i = sys.version.find(prefix)
-    if i == -1:
-        return 6
-    i = i + len(prefix)
-    s, rest = sys.version[i:].split(" ", 1)
-    majorVersion = int(s[:-2]) - 6
-    if majorVersion >= 13:
-        # v13 was skipped and should be v14
-        majorVersion += 1
-    minorVersion = int(s[2:3]) / 10.0
-    # I don't think paths are affected by minor version in version 6
-    if majorVersion == 6:
-        minorVersion = 0
-    if majorVersion >= 6:
-        return majorVersion + minorVersion
-    # else we don't know what version of the compiler this is
-    return None
-
-
-def get_build_architecture():
-    """Return the processor architecture.
-
-    Possible results are "Intel" or "AMD64".
-    """
-
-    prefix = " bit ("
-    i = sys.version.find(prefix)
-    if i == -1:
-        return "Intel"
-    j = sys.version.find(")", i)
-    return sys.version[i + len(prefix) : j]
-
-
-def normalize_and_reduce_paths(paths):
-    """Return a list of normalized paths with duplicates removed.
-
-    The current order of paths is maintained.
-    """
-    # Paths are normalized so things like:  /a and /a/ aren't both preserved.
-    reduced_paths = []
-    for p in paths:
-        np = os.path.normpath(p)
-        # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
-        if np not in reduced_paths:
-            reduced_paths.append(np)
-    return reduced_paths
-
-
-class MSVCCompiler(CCompiler):
-    """Concrete class that implements an interface to Microsoft Visual C++,
-    as defined by the CCompiler abstract class."""
-
-    compiler_type = 'msvc'
-
-    # Just set this so CCompiler's constructor doesn't barf.  We currently
-    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
-    # as it really isn't necessary for this sort of single-compiler class.
-    # Would be nice to have a consistent interface with UnixCCompiler,
-    # though, so it's worth thinking about.
-    executables = {}
-
-    # Private class data (need to distinguish C from C++ source for compiler)
-    _c_extensions = ['.c']
-    _cpp_extensions = ['.cc', '.cpp', '.cxx']
-    _rc_extensions = ['.rc']
-    _mc_extensions = ['.mc']
-
-    # Needed for the filename generation methods provided by the
-    # base class, CCompiler.
-    src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions
-    res_extension = '.res'
-    obj_extension = '.obj'
-    static_lib_extension = '.lib'
-    shared_lib_extension = '.dll'
-    static_lib_format = shared_lib_format = '%s%s'
-    exe_extension = '.exe'
-
-    def __init__(self, verbose=False, dry_run=False, force=False):
-        super().__init__(verbose, dry_run, force)
-        self.__version = get_build_version()
-        self.__arch = get_build_architecture()
-        if self.__arch == "Intel":
-            # x86
-            if self.__version >= 7:
-                self.__root = r"Software\Microsoft\VisualStudio"
-                self.__macros = MacroExpander(self.__version)
-            else:
-                self.__root = r"Software\Microsoft\Devstudio"
-            self.__product = f"Visual Studio version {self.__version}"
-        else:
-            # Win64. Assume this was built with the platform SDK
-            self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)
-
-        self.initialized = False
-
-    def initialize(self):
-        self.__paths = []
-        if (
-            "DISTUTILS_USE_SDK" in os.environ
-            and "MSSdk" in os.environ
-            and self.find_exe("cl.exe")
-        ):
-            # Assume that the SDK set up everything alright; don't try to be
-            # smarter
-            self.cc = "cl.exe"
-            self.linker = "link.exe"
-            self.lib = "lib.exe"
-            self.rc = "rc.exe"
-            self.mc = "mc.exe"
-        else:
-            self.__paths = self.get_msvc_paths("path")
-
-            if len(self.__paths) == 0:
-                raise DistutilsPlatformError(
-                    f"Python was built with {self.__product}, "
-                    "and extensions need to be built with the same "
-                    "version of the compiler, but it isn't installed."
-                )
-
-            self.cc = self.find_exe("cl.exe")
-            self.linker = self.find_exe("link.exe")
-            self.lib = self.find_exe("lib.exe")
-            self.rc = self.find_exe("rc.exe")  # resource compiler
-            self.mc = self.find_exe("mc.exe")  # message compiler
-            self.set_path_env_var('lib')
-            self.set_path_env_var('include')
-
-        # extend the MSVC path with the current path
-        try:
-            for p in os.environ['path'].split(';'):
-                self.__paths.append(p)
-        except KeyError:
-            pass
-        self.__paths = normalize_and_reduce_paths(self.__paths)
-        os.environ['path'] = ";".join(self.__paths)
-
-        self.preprocess_options = None
-        if self.__arch == "Intel":
-            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GX', '/DNDEBUG']
-            self.compile_options_debug = [
-                '/nologo',
-                '/Od',
-                '/MDd',
-                '/W3',
-                '/GX',
-                '/Z7',
-                '/D_DEBUG',
-            ]
-        else:
-            # Win64
-            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG']
-            self.compile_options_debug = [
-                '/nologo',
-                '/Od',
-                '/MDd',
-                '/W3',
-                '/GS-',
-                '/Z7',
-                '/D_DEBUG',
-            ]
-
-        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
-        if self.__version >= 7:
-            self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG']
-        else:
-            self.ldflags_shared_debug = [
-                '/DLL',
-                '/nologo',
-                '/INCREMENTAL:no',
-                '/pdb:None',
-                '/DEBUG',
-            ]
-        self.ldflags_static = ['/nologo']
-
-        self.initialized = True
-
-    # -- Worker methods ------------------------------------------------
-
-    def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
-        # Copied from ccompiler.py, extended to return .res as 'object'-file
-        # for .rc input file
-        if output_dir is None:
-            output_dir = ''
-        obj_names = []
-        for src_name in source_filenames:
-            (base, ext) = os.path.splitext(src_name)
-            base = os.path.splitdrive(base)[1]  # Chop off the drive
-            base = base[os.path.isabs(base) :]  # If abs, chop off leading /
-            if ext not in self.src_extensions:
-                # Better to raise an exception instead of silently continuing
-                # and later complain about sources and targets having
-                # different lengths
-                raise CompileError(f"Don't know how to compile {src_name}")
-            if strip_dir:
-                base = os.path.basename(base)
-            if ext in self._rc_extensions:
-                obj_names.append(os.path.join(output_dir, base + self.res_extension))
-            elif ext in self._mc_extensions:
-                obj_names.append(os.path.join(output_dir, base + self.res_extension))
-            else:
-                obj_names.append(os.path.join(output_dir, base + self.obj_extension))
-        return obj_names
-
-    def compile(  # noqa: C901
-        self,
-        sources,
-        output_dir=None,
-        macros=None,
-        include_dirs=None,
-        debug=False,
-        extra_preargs=None,
-        extra_postargs=None,
-        depends=None,
-    ):
-        if not self.initialized:
-            self.initialize()
-        compile_info = self._setup_compile(
-            output_dir, macros, include_dirs, sources, depends, extra_postargs
-        )
-        macros, objects, extra_postargs, pp_opts, build = compile_info
-
-        compile_opts = extra_preargs or []
-        compile_opts.append('/c')
-        if debug:
-            compile_opts.extend(self.compile_options_debug)
-        else:
-            compile_opts.extend(self.compile_options)
-
-        for obj in objects:
-            try:
-                src, ext = build[obj]
-            except KeyError:
-                continue
-            if debug:
-                # pass the full pathname to MSVC in debug mode,
-                # this allows the debugger to find the source file
-                # without asking the user to browse for it
-                src = os.path.abspath(src)
-
-            if ext in self._c_extensions:
-                input_opt = "/Tc" + src
-            elif ext in self._cpp_extensions:
-                input_opt = "/Tp" + src
-            elif ext in self._rc_extensions:
-                # compile .RC to .RES file
-                input_opt = src
-                output_opt = "/fo" + obj
-                try:
-                    self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt])
-                except DistutilsExecError as msg:
-                    raise CompileError(msg)
-                continue
-            elif ext in self._mc_extensions:
-                # Compile .MC to .RC file to .RES file.
-                #   * '-h dir' specifies the directory for the
-                #     generated include file
-                #   * '-r dir' specifies the target directory of the
-                #     generated RC file and the binary message resource
-                #     it includes
-                #
-                # For now (since there are no options to change this),
-                # we use the source-directory for the include file and
-                # the build directory for the RC file and message
-                # resources. This works at least for win32all.
-                h_dir = os.path.dirname(src)
-                rc_dir = os.path.dirname(obj)
-                try:
-                    # first compile .MC to .RC and .H file
-                    self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src])
-                    base, _ = os.path.splitext(os.path.basename(src))
-                    rc_file = os.path.join(rc_dir, base + '.rc')
-                    # then compile .RC to .RES file
-                    self.spawn([self.rc] + ["/fo" + obj] + [rc_file])
-
-                except DistutilsExecError as msg:
-                    raise CompileError(msg)
-                continue
-            else:
-                # how to handle this file?
-                raise CompileError(f"Don't know how to compile {src} to {obj}")
-
-            output_opt = "/Fo" + obj
-            try:
-                self.spawn(
-                    [self.cc]
-                    + compile_opts
-                    + pp_opts
-                    + [input_opt, output_opt]
-                    + extra_postargs
-                )
-            except DistutilsExecError as msg:
-                raise CompileError(msg)
-
-        return objects
-
-    def create_static_lib(
-        self, objects, output_libname, output_dir=None, debug=False, target_lang=None
-    ):
-        if not self.initialized:
-            self.initialize()
-        (objects, output_dir) = self._fix_object_args(objects, output_dir)
-        output_filename = self.library_filename(output_libname, output_dir=output_dir)
-
-        if self._need_link(objects, output_filename):
-            lib_args = objects + ['/OUT:' + output_filename]
-            if debug:
-                pass  # XXX what goes here?
-            try:
-                self.spawn([self.lib] + lib_args)
-            except DistutilsExecError as msg:
-                raise LibError(msg)
-        else:
-            log.debug("skipping %s (up-to-date)", output_filename)
-
-    def link(  # noqa: C901
-        self,
-        target_desc,
-        objects,
-        output_filename,
-        output_dir=None,
-        libraries=None,
-        library_dirs=None,
-        runtime_library_dirs=None,
-        export_symbols=None,
-        debug=False,
-        extra_preargs=None,
-        extra_postargs=None,
-        build_temp=None,
-        target_lang=None,
-    ):
-        if not self.initialized:
-            self.initialize()
-        (objects, output_dir) = self._fix_object_args(objects, output_dir)
-        fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
-        (libraries, library_dirs, runtime_library_dirs) = fixed_args
-
-        if runtime_library_dirs:
-            self.warn(
-                "I don't know what to do with 'runtime_library_dirs': "
-                + str(runtime_library_dirs)
-            )
-
-        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries)
-        if output_dir is not None:
-            output_filename = os.path.join(output_dir, output_filename)
-
-        if self._need_link(objects, output_filename):
-            if target_desc == CCompiler.EXECUTABLE:
-                if debug:
-                    ldflags = self.ldflags_shared_debug[1:]
-                else:
-                    ldflags = self.ldflags_shared[1:]
-            else:
-                if debug:
-                    ldflags = self.ldflags_shared_debug
-                else:
-                    ldflags = self.ldflags_shared
-
-            export_opts = [f"/EXPORT:{sym}" for sym in export_symbols or []]
-
-            ld_args = (
-                ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename]
-            )
-
-            # The MSVC linker generates .lib and .exp files, which cannot be
-            # suppressed by any linker switches. The .lib files may even be
-            # needed! Make sure they are generated in the temporary build
-            # directory. Since they have different names for debug and release
-            # builds, they can go into the same directory.
-            if export_symbols is not None:
-                (dll_name, dll_ext) = os.path.splitext(
-                    os.path.basename(output_filename)
-                )
-                implib_file = os.path.join(
-                    os.path.dirname(objects[0]), self.library_filename(dll_name)
-                )
-                ld_args.append('/IMPLIB:' + implib_file)
-
-            if extra_preargs:
-                ld_args[:0] = extra_preargs
-            if extra_postargs:
-                ld_args.extend(extra_postargs)
-
-            self.mkpath(os.path.dirname(output_filename))
-            try:
-                self.spawn([self.linker] + ld_args)
-            except DistutilsExecError as msg:
-                raise LinkError(msg)
-
-        else:
-            log.debug("skipping %s (up-to-date)", output_filename)
-
-    # -- Miscellaneous methods -----------------------------------------
-    # These are all used by the 'gen_lib_options()' function, in
-    # ccompiler.py.
-
-    def library_dir_option(self, dir):
-        return "/LIBPATH:" + dir
-
-    def runtime_library_dir_option(self, dir):
-        raise DistutilsPlatformError(
-            "don't know how to set runtime library search path for MSVC++"
-        )
-
-    def library_option(self, lib):
-        return self.library_filename(lib)
-
-    def find_library_file(self, dirs, lib, debug=False):
-        # Prefer a debugging library if found (and requested), but deal
-        # with it if we don't have one.
-        if debug:
-            try_names = [lib + "_d", lib]
-        else:
-            try_names = [lib]
-        for dir in dirs:
-            for name in try_names:
-                libfile = os.path.join(dir, self.library_filename(name))
-                if os.path.exists(libfile):
-                    return libfile
-        else:
-            # Oops, didn't find it in *any* of 'dirs'
-            return None
-
-    # Helper methods for using the MSVC registry settings
-
-    def find_exe(self, exe):
-        """Return path to an MSVC executable program.
-
-        Tries to find the program in several places: first, one of the
-        MSVC program search paths from the registry; next, the directories
-        in the PATH environment variable.  If any of those work, return an
-        absolute path that is known to exist.  If none of them work, just
-        return the original program name, 'exe'.
-        """
-        for p in self.__paths:
-            fn = os.path.join(os.path.abspath(p), exe)
-            if os.path.isfile(fn):
-                return fn
-
-        # didn't find it; try existing path
-        for p in os.environ['Path'].split(';'):
-            fn = os.path.join(os.path.abspath(p), exe)
-            if os.path.isfile(fn):
-                return fn
-
-        return exe
-
-    def get_msvc_paths(self, path, platform='x86'):
-        """Get a list of devstudio directories (include, lib or path).
-
-        Return a list of strings.  The list will be empty if unable to
-        access the registry or appropriate registry keys not found.
-        """
-        if not _can_read_reg:
-            return []
-
-        path = path + " dirs"
-        if self.__version >= 7:
-            key = rf"{self.__root}\{self.__version:0.1f}\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
-        else:
-            key = (
-                rf"{self.__root}\6.0\Build System\Components\Platforms"
-                rf"\Win32 ({platform})\Directories"
-            )
-
-        for base in HKEYS:
-            d = read_values(base, key)
-            if d:
-                if self.__version >= 7:
-                    return self.__macros.sub(d[path]).split(";")
-                else:
-                    return d[path].split(";")
-        # MSVC 6 seems to create the registry entries we need only when
-        # the GUI is run.
-        if self.__version == 6:
-            for base in HKEYS:
-                if read_values(base, rf"{self.__root}\6.0") is not None:
-                    self.warn(
-                        "It seems you have Visual Studio 6 installed, "
-                        "but the expected registry settings are not present.\n"
-                        "You must at least run the Visual Studio GUI once "
-                        "so that these entries are created."
-                    )
-                    break
-        return []
-
-    def set_path_env_var(self, name):
-        """Set environment variable 'name' to an MSVC path type value.
-
-        This is equivalent to a SET command prior to execution of spawned
-        commands.
-        """
-
-        if name == "lib":
-            p = self.get_msvc_paths("library")
-        else:
-            p = self.get_msvc_paths(name)
-        if p:
-            os.environ[name] = ';'.join(p)
-
-
-if get_build_version() >= 8.0:
-    log.debug("Importing new compiler from distutils.msvc9compiler")
-    OldMSVCCompiler = MSVCCompiler
-    # get_build_architecture not really relevant now we support cross-compile
-    from distutils.msvc9compiler import (
-        MacroExpander,
-        MSVCCompiler,
-    )
diff --git a/setuptools/_distutils/sysconfig.py b/setuptools/_distutils/sysconfig.py
index fbdd5d73ae..28a7c571dc 100644
--- a/setuptools/_distutils/sysconfig.py
+++ b/setuptools/_distutils/sysconfig.py
@@ -236,7 +236,7 @@ def get_python_lib(plat_specific=False, standard_lib=False, prefix=None):
         if prefix is None:
             prefix = PREFIX
         if standard_lib:
-            return os.path.join(prefix, "lib-python", sys.version[0])
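+            # sys.version_info.major is an int, so wrap it in str() for
+            # os.path.join; slicing sys.version would also misbehave once
+            # the major version reaches two digits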
+            return os.path.join(prefix, "lib-python", str(sys.version_info.major))
         return os.path.join(prefix, 'site-packages')
 
     early_prefix = prefix
diff --git a/setuptools/_distutils/tests/test_msvc9compiler.py b/setuptools/_distutils/tests/test_msvc9compiler.py
deleted file mode 100644
index 6f6aabee4d..0000000000
--- a/setuptools/_distutils/tests/test_msvc9compiler.py
+++ /dev/null
@@ -1,184 +0,0 @@
-"""Tests for distutils.msvc9compiler."""
-
-import os
-import sys
-from distutils.errors import DistutilsPlatformError
-from distutils.tests import support
-
-import pytest
-
-# A manifest with the only assembly reference being the msvcrt assembly, so
-# should have the assembly completely stripped.  Note that although the
-# assembly has a <security> reference the assembly is removed - that is
-# currently a "feature", not a bug :)
-_MANIFEST_WITH_ONLY_MSVC_REFERENCE = """\
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
-          manifestVersion="1.0">
-  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
-    <security>
-      <requestedPrivileges>
-        <requestedExecutionLevel level="asInvoker" uiAccess="false">
-        </requestedExecutionLevel>
-      </requestedPrivileges>
-    </security>
-  </trustInfo>
-  <dependency>
-    <dependentAssembly>
-      <assemblyIdentity type="win32" name="Microsoft.VC90.CRT"
-         version="9.0.21022.8" processorArchitecture="x86"
-         publicKeyToken="XXXX">
-      </assemblyIdentity>
-    </dependentAssembly>
-  </dependency>
-</assembly>
-"""
-
-# A manifest with references to assemblies other than msvcrt.  When processed,
-# this assembly should be returned with just the msvcrt part removed.
-_MANIFEST_WITH_MULTIPLE_REFERENCES = """\
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
-          manifestVersion="1.0">
-  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
-    <security>
-      <requestedPrivileges>
-        <requestedExecutionLevel level="asInvoker" uiAccess="false">
-        </requestedExecutionLevel>
-      </requestedPrivileges>
-    </security>
-  </trustInfo>
-  <dependency>
-    <dependentAssembly>
-      <assemblyIdentity type="win32" name="Microsoft.VC90.CRT"
-         version="9.0.21022.8" processorArchitecture="x86"
-         publicKeyToken="XXXX">
-      </assemblyIdentity>
-    </dependentAssembly>
-  </dependency>
-  <dependency>
-    <dependentAssembly>
-      <assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
-        version="9.0.21022.8" processorArchitecture="x86"
-        publicKeyToken="XXXX"></assemblyIdentity>
-    </dependentAssembly>
-  </dependency>
-</assembly>
-"""
-
-_CLEANED_MANIFEST = """\
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
-          manifestVersion="1.0">
-  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
-    <security>
-      <requestedPrivileges>
-        <requestedExecutionLevel level="asInvoker" uiAccess="false">
-        </requestedExecutionLevel>
-      </requestedPrivileges>
-    </security>
-  </trustInfo>
-  <dependency>
-
-  </dependency>
-  <dependency>
-    <dependentAssembly>
-      <assemblyIdentity type="win32" name="Microsoft.VC90.MFC"
-        version="9.0.21022.8" processorArchitecture="x86"
-        publicKeyToken="XXXX"></assemblyIdentity>
-    </dependentAssembly>
-  </dependency>
-</assembly>"""
-
-if sys.platform == "win32":
-    from distutils.msvccompiler import get_build_version
-
-    if get_build_version() >= 8.0:
-        SKIP_MESSAGE = None
-    else:
-        SKIP_MESSAGE = "These tests are only for MSVC8.0 or above"
-else:
-    SKIP_MESSAGE = "These tests are only for win32"
-
-
-@pytest.mark.skipif('SKIP_MESSAGE', reason=SKIP_MESSAGE)
-class Testmsvc9compiler(support.TempdirManager):
-    def test_no_compiler(self):
-        # makes sure query_vcvarsall raises
-        # a DistutilsPlatformError if the compiler
-        # is not found
-        from distutils.msvc9compiler import query_vcvarsall
-
-        def _find_vcvarsall(version):
-            return None
-
-        from distutils import msvc9compiler
-
-        old_find_vcvarsall = msvc9compiler.find_vcvarsall
-        msvc9compiler.find_vcvarsall = _find_vcvarsall
-        try:
-            with pytest.raises(DistutilsPlatformError):
-                query_vcvarsall('wont find this version')
-        finally:
-            msvc9compiler.find_vcvarsall = old_find_vcvarsall
-
-    def test_reg_class(self):
-        from distutils.msvc9compiler import Reg
-
-        with pytest.raises(KeyError):
-            Reg.get_value('xxx', 'xxx')
-
-        # looking for values that should exist on all
-        # windows registry versions.
-        path = r'Control Panel\Desktop'
-        v = Reg.get_value(path, 'dragfullwindows')
-        assert v in ('0', '1', '2')
-
-        import winreg
-
-        HKCU = winreg.HKEY_CURRENT_USER
-        keys = Reg.read_keys(HKCU, 'xxxx')
-        assert keys is None
-
-        keys = Reg.read_keys(HKCU, r'Control Panel')
-        assert 'Desktop' in keys
-
-    def test_remove_visual_c_ref(self):
-        from distutils.msvc9compiler import MSVCCompiler
-
-        tempdir = self.mkdtemp()
-        manifest = os.path.join(tempdir, 'manifest')
-        f = open(manifest, 'w')
-        try:
-            f.write(_MANIFEST_WITH_MULTIPLE_REFERENCES)
-        finally:
-            f.close()
-
-        compiler = MSVCCompiler()
-        compiler._remove_visual_c_ref(manifest)
-
-        # see what we got
-        f = open(manifest)
-        try:
-            # removing trailing spaces
-            content = '\n'.join([line.rstrip() for line in f])
-        finally:
-            f.close()
-
-        # makes sure the manifest was properly cleaned
-        assert content == _CLEANED_MANIFEST
-
-    def test_remove_entire_manifest(self):
-        from distutils.msvc9compiler import MSVCCompiler
-
-        tempdir = self.mkdtemp()
-        manifest = os.path.join(tempdir, 'manifest')
-        f = open(manifest, 'w')
-        try:
-            f.write(_MANIFEST_WITH_ONLY_MSVC_REFERENCE)
-        finally:
-            f.close()
-
-        compiler = MSVCCompiler()
-        got = compiler._remove_visual_c_ref(manifest)
-        assert got is None
diff --git a/setuptools/_distutils/tests/test_msvccompiler.py b/setuptools/_distutils/tests/test_msvccompiler.py
index 23b6c732c3..71129cae27 100644
--- a/setuptools/_distutils/tests/test_msvccompiler.py
+++ b/setuptools/_distutils/tests/test_msvccompiler.py
@@ -14,22 +14,19 @@
 
 
 class Testmsvccompiler(support.TempdirManager):
-    def test_no_compiler(self):
+    def test_no_compiler(self, monkeypatch):
         # makes sure query_vcvarsall raises
         # a DistutilsPlatformError if the compiler
         # is not found
         def _find_vcvarsall(plat_spec):
             return None, None
 
-        old_find_vcvarsall = _msvccompiler._find_vcvarsall
-        _msvccompiler._find_vcvarsall = _find_vcvarsall
-        try:
-            with pytest.raises(DistutilsPlatformError):
-                _msvccompiler._get_vc_env(
-                    'wont find this version',
-                )
-        finally:
-            _msvccompiler._find_vcvarsall = old_find_vcvarsall
+        monkeypatch.setattr(_msvccompiler, '_find_vcvarsall', _find_vcvarsall)
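+        # pytest's monkeypatch fixture undoes the setattr at teardown,
+        # replacing the manual save/restore in the old try/finally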
+
+        with pytest.raises(DistutilsPlatformError):
+            _msvccompiler._get_vc_env(
+                'wont find this version',
+            )
 
     @needs_winreg
     def test_get_vc_env_unicode(self):
diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/INSTALLER b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/INSTALLER
similarity index 100%
rename from setuptools/_distutils/_vendor/packaging-24.0.dist-info/INSTALLER
rename to setuptools/_vendor/jaraco.collections-5.1.0.dist-info/INSTALLER
diff --git a/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/LICENSE b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/LICENSE
new file mode 100644
index 0000000000..1bb5a44356
--- /dev/null
+++ b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA
new file mode 100644
index 0000000000..fe6ca5ad88
--- /dev/null
+++ b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA
@@ -0,0 +1,85 @@
+Metadata-Version: 2.1
+Name: jaraco.collections
+Version: 5.1.0
+Summary: Collection objects similar to those in stdlib by jaraco
+Author-email: "Jason R. Coombs" <jaraco@jaraco.com>
+Project-URL: Source, https://github.com/jaraco/jaraco.collections
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: jaraco.text
+Provides-Extra: check
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'check'
+Requires-Dist: pytest-ruff >=0.2.1 ; (sys_platform != "cygwin") and extra == 'check'
+Provides-Extra: cover
+Requires-Dist: pytest-cov ; extra == 'cover'
+Provides-Extra: doc
+Requires-Dist: sphinx >=3.5 ; extra == 'doc'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
+Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
+Requires-Dist: furo ; extra == 'doc'
+Requires-Dist: sphinx-lint ; extra == 'doc'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
+Provides-Extra: enabler
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'enabler'
+Provides-Extra: test
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
+Provides-Extra: type
+Requires-Dist: pytest-mypy ; extra == 'type'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.collections.svg
+   :target: https://pypi.org/project/jaraco.collections
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.collections.svg
+
+.. image:: https://github.com/jaraco/jaraco.collections/actions/workflows/main.yml/badge.svg
+   :target: https://github.com/jaraco/jaraco.collections/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+    :target: https://github.com/astral-sh/ruff
+    :alt: Ruff
+
+.. image:: https://readthedocs.org/projects/jaracocollections/badge/?version=latest
+   :target: https://jaracocollections.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2024-informational
+   :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/jaraco.collections
+   :target: https://tidelift.com/subscription/pkg/pypi-jaraco.collections?utm_source=pypi-jaraco.collections&utm_medium=readme
+
+Models and classes to supplement the stdlib 'collections' module.
+
+See the docs, linked above, for descriptions and usage examples.
+
+Highlights include:
+
+- RangeMap: A mapping that accepts a range of values for keys.
+- Projection: A subset over an existing mapping.
+- KeyTransformingDict: Generalized mapping with keys transformed by a function.
+- FoldedCaseKeyedDict: A dict whose string keys are case-insensitive.
+- BijectiveMap: A map where keys map to values and values back to their keys.
+- ItemsAsAttributes: A mapping mix-in exposing items as attributes.
+- IdentityOverrideMap: A map whose keys map by default to themselves unless overridden.
+- FrozenDict: A hashable, immutable map.
+- Enumeration: An object whose keys are enumerated.
+- Everything: A container that contains all things.
+- Least, Greatest: Objects that are always less than or greater than any other.
+- pop_all: Return all items from the mutable sequence and remove them from that sequence.
+- DictStack: A stack of dicts, great for sharing scopes.
+- WeightedLookup: A specialized RangeMap for selecting an item by weights.
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-jaraco.collections?utm_source=pypi-jaraco.collections&utm_medium=referral&utm_campaign=github>`_.
diff --git a/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/RECORD b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/RECORD
new file mode 100644
index 0000000000..48b957ec88
--- /dev/null
+++ b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/RECORD
@@ -0,0 +1,10 @@
+jaraco.collections-5.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.collections-5.1.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023
+jaraco.collections-5.1.0.dist-info/METADATA,sha256=IMUaliNsA5X1Ox9MXUWOagch5R4Wwb_3M7erp29dBtg,3933
+jaraco.collections-5.1.0.dist-info/RECORD,,
+jaraco.collections-5.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jaraco.collections-5.1.0.dist-info/WHEEL,sha256=Mdi9PDNwEZptOjTlUcAth7XJDFtKrHYaQMPulZeBCiQ,91
+jaraco.collections-5.1.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/collections/__init__.py,sha256=Pc1-SqjWm81ad1P0-GttpkwO_LWlnaY6gUq8gcKh2v0,26640
+jaraco/collections/__pycache__/__init__.cpython-312.pyc,,
+jaraco/collections/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/REQUESTED b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/REQUESTED
similarity index 100%
rename from setuptools/_distutils/_vendor/packaging-24.0.dist-info/REQUESTED
rename to setuptools/_vendor/jaraco.collections-5.1.0.dist-info/REQUESTED
diff --git a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/WHEEL b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL
similarity index 64%
rename from setuptools/_distutils/_vendor/packaging-24.0.dist-info/WHEEL
rename to setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL
index 3b5e64b5e6..50e1e84e4a 100644
--- a/setuptools/_distutils/_vendor/packaging-24.0.dist-info/WHEEL
+++ b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL
@@ -1,4 +1,5 @@
 Wheel-Version: 1.0
-Generator: flit 3.9.0
+Generator: setuptools (73.0.1)
 Root-Is-Purelib: true
 Tag: py3-none-any
+
diff --git a/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/top_level.txt b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..f6205a5f19
--- /dev/null
+++ b/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/setuptools/_vendor/jaraco/collections/__init__.py b/setuptools/_vendor/jaraco/collections/__init__.py
new file mode 100644
index 0000000000..0d501cf9e9
--- /dev/null
+++ b/setuptools/_vendor/jaraco/collections/__init__.py
@@ -0,0 +1,1091 @@
+from __future__ import annotations
+
+import collections.abc
+import copy
+import functools
+import itertools
+import operator
+import random
+import re
+from collections.abc import Container, Iterable, Mapping
+from typing import TYPE_CHECKING, Any, Callable, Dict, TypeVar, Union, overload
+
+import jaraco.text
+
+if TYPE_CHECKING:
+    from _operator import _SupportsComparison
+
+    from _typeshed import SupportsKeysAndGetItem
+    from typing_extensions import Self
+
+    _RangeMapKT = TypeVar('_RangeMapKT', bound=_SupportsComparison)
+else:
+    # _SupportsComparison doesn't exist at runtime,
+    # but _RangeMapKT is used in RangeMap's superclass' type parameters
+    _RangeMapKT = TypeVar('_RangeMapKT')
+
+_T = TypeVar('_T')
+_VT = TypeVar('_VT')
+
+_Matchable = Union[Callable, Container, Iterable, re.Pattern]
+
+
+def _dispatch(obj: _Matchable) -> Callable:
+    # can't rely on singledispatch for Union[Container, Iterable]
+    # due to ambiguity
+    # (https://peps.python.org/pep-0443/#abstract-base-classes).
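+    # For example: a compiled pattern dispatches to pattern.fullmatch, a list
+    # to list.__contains__, a bare iterator is materialized into a set first,
+    # and a callable is returned unchanged.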
+    if isinstance(obj, re.Pattern):
+        return obj.fullmatch
+    # mypy issue: https://github.com/python/mypy/issues/11071
+    if not isinstance(obj, Callable):  # type: ignore[arg-type]
+        if not isinstance(obj, Container):
+            obj = set(obj)  # type: ignore[arg-type]
+        obj = obj.__contains__
+    return obj  # type: ignore[return-value]
+
+
+class Projection(collections.abc.Mapping):
+    """
+    Project a set of keys over a mapping
+
+    >>> sample = {'a': 1, 'b': 2, 'c': 3}
+    >>> prj = Projection(['a', 'c', 'd'], sample)
+    >>> dict(prj)
+    {'a': 1, 'c': 3}
+
+    Projection also accepts an iterable or callable or pattern.
+
+    >>> iter_prj = Projection(iter('acd'), sample)
+    >>> call_prj = Projection(lambda k: ord(k) in (97, 99, 100), sample)
+    >>> pat_prj = Projection(re.compile(r'[acd]'), sample)
+    >>> prj == iter_prj == call_prj == pat_prj
+    True
+
+    Keys should only appear if they were specified and exist in the space.
+    Order is retained.
+
+    >>> list(prj)
+    ['a', 'c']
+
+    Attempting to access a key not in the projection
+    results in a KeyError.
+
+    >>> prj['b']
+    Traceback (most recent call last):
+    ...
+    KeyError: 'b'
+
+    Use the projection to update another dict.
+
+    >>> target = {'a': 2, 'b': 2}
+    >>> target.update(prj)
+    >>> target
+    {'a': 1, 'b': 2, 'c': 3}
+
+    Projection keeps a reference to the original dict, so
+    modifying the original dict may modify the Projection.
+
+    >>> del sample['a']
+    >>> dict(prj)
+    {'c': 3}
+    """
+
+    def __init__(self, keys: _Matchable, space: Mapping):
+        self._match = _dispatch(keys)
+        self._space = space
+
+    def __getitem__(self, key):
+        if not self._match(key):
+            raise KeyError(key)
+        return self._space[key]
+
+    def _keys_resolved(self):
+        return filter(self._match, self._space)
+
+    def __iter__(self):
+        return self._keys_resolved()
+
+    def __len__(self):
+        return len(tuple(self._keys_resolved()))
+
+
+class Mask(Projection):
+    """
+    The inverse of a :class:`Projection`, masking out keys.
+
+    >>> sample = {'a': 1, 'b': 2, 'c': 3}
+    >>> msk = Mask(['a', 'c', 'd'], sample)
+    >>> dict(msk)
+    {'b': 2}
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        # self._match = compose(operator.not_, self._match)
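+        # binding the original matcher as a default argument avoids the
+        # infinite recursion a plain closure over self._match would cause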
+        self._match = lambda key, orig=self._match: not orig(key)
+
+
+def dict_map(function, dictionary):
+    """
+    Return a new dict with function applied to values of dictionary.
+
+    >>> dict_map(lambda x: x+1, dict(a=1, b=2))
+    {'a': 2, 'b': 3}
+    """
+    return dict((key, function(value)) for key, value in dictionary.items())
+
+
+class RangeMap(Dict[_RangeMapKT, _VT]):
+    """
+    A dictionary-like object that uses the keys as bounds for a range.
+    Inclusion of the value for that range is determined by the
+    key_match_comparator, which defaults to less-than-or-equal.
+    A value is returned for a key if it is the first key that matches in
+    the sorted list of keys.
+
+    One may supply keyword parameters to be passed to the sort function used
+    to sort keys (i.e. key, reverse) as sort_params.
+
+    Create a map that maps 1-3 -> 'a', 4-6 -> 'b'
+
+    >>> r = RangeMap({3: 'a', 6: 'b'})  # boy, that was easy
+    >>> r[1], r[2], r[3], r[4], r[5], r[6]
+    ('a', 'a', 'a', 'b', 'b', 'b')
+
+    Even float values should work so long as the comparison operator
+    supports it.
+
+    >>> r[4.5]
+    'b'
+
+    Notice that the way rangemap is defined, it must be open-ended
+    on one side.
+
+    >>> r[0]
+    'a'
+    >>> r[-1]
+    'a'
+
+    One can close the open-end of the RangeMap by using undefined_value
+
+    >>> r = RangeMap({0: RangeMap.undefined_value, 3: 'a', 6: 'b'})
+    >>> r[0]
+    Traceback (most recent call last):
+    ...
+    KeyError: 0
+
+    One can get the first or last elements in the range by using RangeMap.Item
+
+    >>> last_item = RangeMap.Item(-1)
+    >>> r[last_item]
+    'b'
+
+    .last_item is a shortcut for Item(-1)
+
+    >>> r[RangeMap.last_item]
+    'b'
+
+    Sometimes it's useful to find the bounds for a RangeMap
+
+    >>> r.bounds()
+    (0, 6)
+
+    RangeMap supports .get(key, default)
+
+    >>> r.get(0, 'not found')
+    'not found'
+
+    >>> r.get(7, 'not found')
+    'not found'
+
+    One often wishes to define the ranges by their left-most values,
+    which requires use of sort params and a key_match_comparator.
+
+    >>> r = RangeMap({1: 'a', 4: 'b'},
+    ...     sort_params=dict(reverse=True),
+    ...     key_match_comparator=operator.ge)
+    >>> r[1], r[2], r[3], r[4], r[5], r[6]
+    ('a', 'a', 'a', 'b', 'b', 'b')
+
+    That wasn't nearly as easy as before, so an alternate constructor
+    is provided:
+
+    >>> r = RangeMap.left({1: 'a', 4: 'b', 7: RangeMap.undefined_value})
+    >>> r[1], r[2], r[3], r[4], r[5], r[6]
+    ('a', 'a', 'a', 'b', 'b', 'b')
+
+    """
+
+    def __init__(
+        self,
+        source: (
+            SupportsKeysAndGetItem[_RangeMapKT, _VT] | Iterable[tuple[_RangeMapKT, _VT]]
+        ),
+        sort_params: Mapping[str, Any] = {},
+        key_match_comparator: Callable[[_RangeMapKT, _RangeMapKT], bool] = operator.le,
+    ):
+        dict.__init__(self, source)
+        self.sort_params = sort_params
+        self.match = key_match_comparator
+
+    @classmethod
+    def left(
+        cls,
+        source: (
+            SupportsKeysAndGetItem[_RangeMapKT, _VT] | Iterable[tuple[_RangeMapKT, _VT]]
+        ),
+    ) -> Self:
+        return cls(
+            source, sort_params=dict(reverse=True), key_match_comparator=operator.ge
+        )
+
+    def __getitem__(self, item: _RangeMapKT) -> _VT:
+        sorted_keys = sorted(self.keys(), **self.sort_params)
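+        # with the default operator.le comparator, an item matches the first
+        # sorted bound it is <= to; e.g. 2 -> key 3 in RangeMap({3: 'a', 6: 'b'})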
+        if isinstance(item, RangeMap.Item):
+            result = self.__getitem__(sorted_keys[item])
+        else:
+            key = self._find_first_match_(sorted_keys, item)
+            result = dict.__getitem__(self, key)
+            if result is RangeMap.undefined_value:
+                raise KeyError(key)
+        return result
+
+    @overload  # type: ignore[override] # Signature simplified over dict and Mapping
+    def get(self, key: _RangeMapKT, default: _T) -> _VT | _T: ...
+    @overload
+    def get(self, key: _RangeMapKT, default: None = None) -> _VT | None: ...
+    def get(self, key: _RangeMapKT, default: _T | None = None) -> _VT | _T | None:
+        """
+        Return the value for key if key is in the dictionary, else default.
+        If default is not given, it defaults to None, so that this method
+        never raises a KeyError.
+        """
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    def _find_first_match_(
+        self, keys: Iterable[_RangeMapKT], item: _RangeMapKT
+    ) -> _RangeMapKT:
+        is_match = functools.partial(self.match, item)
+        matches = filter(is_match, keys)
+        try:
+            return next(matches)
+        except StopIteration:
+            raise KeyError(item) from None
+
+    def bounds(self) -> tuple[_RangeMapKT, _RangeMapKT]:
+        sorted_keys = sorted(self.keys(), **self.sort_params)
+        return (sorted_keys[RangeMap.first_item], sorted_keys[RangeMap.last_item])
+
+    # some special values for the RangeMap
+    undefined_value = type('RangeValueUndefined', (), {})()
+
+    class Item(int):
+        """RangeMap Item"""
+
+    first_item = Item(0)
+    last_item = Item(-1)
+
+
+def __identity(x):
+    return x
+
+
+def sorted_items(d, key=__identity, reverse=False):
+    """
+    Return the items of the dictionary sorted by the keys.
+
+    >>> sample = dict(foo=20, bar=42, baz=10)
+    >>> tuple(sorted_items(sample))
+    (('bar', 42), ('baz', 10), ('foo', 20))
+
+    >>> reverse_string = lambda s: ''.join(reversed(s))
+    >>> tuple(sorted_items(sample, key=reverse_string))
+    (('foo', 20), ('bar', 42), ('baz', 10))
+
+    >>> tuple(sorted_items(sample, reverse=True))
+    (('foo', 20), ('baz', 10), ('bar', 42))
+    """
+
+    # wrap the key func so it operates on the first element of each item
+    def pairkey_key(item):
+        return key(item[0])
+
+    return sorted(d.items(), key=pairkey_key, reverse=reverse)
+
+
+class KeyTransformingDict(dict):
+    """
+    A dict subclass that transforms the keys before they're used.
+    Subclasses may override the default transform_key to customize behavior.
+    """
+
+    @staticmethod
+    def transform_key(key):  # pragma: nocover
+        return key
+
+    def __init__(self, *args, **kargs):
+        super().__init__()
+        # build a dictionary using the default constructs
+        d = dict(*args, **kargs)
+        # build this dictionary using transformed keys.
+        for item in d.items():
+            self.__setitem__(*item)
+
+    def __setitem__(self, key, val):
+        key = self.transform_key(key)
+        super().__setitem__(key, val)
+
+    def __getitem__(self, key):
+        key = self.transform_key(key)
+        return super().__getitem__(key)
+
+    def __contains__(self, key):
+        key = self.transform_key(key)
+        return super().__contains__(key)
+
+    def __delitem__(self, key):
+        key = self.transform_key(key)
+        return super().__delitem__(key)
+
+    def get(self, key, *args, **kwargs):
+        key = self.transform_key(key)
+        return super().get(key, *args, **kwargs)
+
+    def setdefault(self, key, *args, **kwargs):
+        key = self.transform_key(key)
+        return super().setdefault(key, *args, **kwargs)
+
+    def pop(self, key, *args, **kwargs):
+        key = self.transform_key(key)
+        return super().pop(key, *args, **kwargs)
+
+    def matching_key_for(self, key):
+        """
+        Given a key, return the actual key stored in self that matches.
+        Raise KeyError if the key isn't found.
+        """
+        try:
+            return next(e_key for e_key in self.keys() if e_key == key)
+        except StopIteration as err:
+            raise KeyError(key) from err
+
+
+class FoldedCaseKeyedDict(KeyTransformingDict):
+    """
+    A case-insensitive dictionary (keys are compared as insensitive
+    if they are strings).
+
+    >>> d = FoldedCaseKeyedDict()
+    >>> d['heLlo'] = 'world'
+    >>> list(d.keys()) == ['heLlo']
+    True
+    >>> list(d.values()) == ['world']
+    True
+    >>> d['hello'] == 'world'
+    True
+    >>> 'hello' in d
+    True
+    >>> 'HELLO' in d
+    True
+    >>> print(repr(FoldedCaseKeyedDict({'heLlo': 'world'})))
+    {'heLlo': 'world'}
+    >>> d = FoldedCaseKeyedDict({'heLlo': 'world'})
+    >>> print(d['hello'])
+    world
+    >>> print(d['Hello'])
+    world
+    >>> list(d.keys())
+    ['heLlo']
+    >>> d = FoldedCaseKeyedDict({'heLlo': 'world', 'Hello': 'world'})
+    >>> list(d.values())
+    ['world']
+    >>> key, = d.keys()
+    >>> key in ['heLlo', 'Hello']
+    True
+    >>> del d['HELLO']
+    >>> d
+    {}
+
+    get should work
+
+    >>> d['Sumthin'] = 'else'
+    >>> d.get('SUMTHIN')
+    'else'
+    >>> d.get('OTHER', 'thing')
+    'thing'
+    >>> del d['sumthin']
+
+    setdefault should also work
+
+    >>> d['This'] = 'that'
+    >>> print(d.setdefault('this', 'other'))
+    that
+    >>> len(d)
+    1
+    >>> print(d['this'])
+    that
+    >>> print(d.setdefault('That', 'other'))
+    other
+    >>> print(d['THAT'])
+    other
+
+    Make it pop!
+
+    >>> print(d.pop('THAT'))
+    other
+
+    To retrieve the key in its originally-supplied form, use matching_key_for
+
+    >>> print(d.matching_key_for('this'))
+    This
+
+    >>> d.matching_key_for('missing')
+    Traceback (most recent call last):
+    ...
+    KeyError: 'missing'
+    """
+
+    @staticmethod
+    def transform_key(key):
+        return jaraco.text.FoldedCase(key)
+
+
+class DictAdapter:
+    """
+    Provide a getitem interface for attributes of an object.
+
+    Let's say you want to get at the string.ascii_lowercase property in a formatted
+    string. It's easy with DictAdapter.
+
+    >>> import string
+    >>> print("lowercase is %(ascii_lowercase)s" % DictAdapter(string))
+    lowercase is abcdefghijklmnopqrstuvwxyz
+    """
+
+    def __init__(self, wrapped_ob):
+        self.object = wrapped_ob
+
+    def __getitem__(self, name):
+        return getattr(self.object, name)
+
+
+class ItemsAsAttributes:
+    """
+    Mix-in class to enable a mapping object to provide items as
+    attributes.
+
+    >>> C = type('C', (dict, ItemsAsAttributes), dict())
+    >>> i = C()
+    >>> i['foo'] = 'bar'
+    >>> i.foo
+    'bar'
+
+    Natural attribute access takes precedence
+
+    >>> i.foo = 'henry'
+    >>> i.foo
+    'henry'
+
+    But as you might expect, the mapping functionality is preserved.
+
+    >>> i['foo']
+    'bar'
+
+    A normal attribute error should be raised if an attribute is
+    requested that doesn't exist.
+
+    >>> i.missing
+    Traceback (most recent call last):
+    ...
+    AttributeError: 'C' object has no attribute 'missing'
+
+    It also works on dicts that customize __getitem__
+
+    >>> missing_func = lambda self, key: 'missing item'
+    >>> C = type(
+    ...     'C',
+    ...     (dict, ItemsAsAttributes),
+    ...     dict(__missing__ = missing_func),
+    ... )
+    >>> i = C()
+    >>> i.missing
+    'missing item'
+    >>> i.foo
+    'missing item'
+    """
+
+    def __getattr__(self, key):
+        try:
+            return getattr(super(), key)
+        except AttributeError as e:
+            # attempt to get the value from the mapping (return self[key])
+            #  but be careful not to lose the original exception context.
+            noval = object()
+
+            def _safe_getitem(cont, key, missing_result):
+                try:
+                    return cont[key]
+                except KeyError:
+                    return missing_result
+
+            result = _safe_getitem(self, key, noval)
+            if result is not noval:
+                return result
+            # raise the original exception, but use the original class
+            #  name, not 'super'.
+            (message,) = e.args
+            message = message.replace('super', self.__class__.__name__, 1)
+            e.args = (message,)
+            raise
+
+
+def invert_map(map):
+    """
+    Given a dictionary, return another dictionary with keys and values
+    switched. If any of the values resolve to the same key, raises
+    a ValueError.
+
+    >>> numbers = dict(a=1, b=2, c=3)
+    >>> letters = invert_map(numbers)
+    >>> letters[1]
+    'a'
+    >>> numbers['d'] = 3
+    >>> invert_map(numbers)
+    Traceback (most recent call last):
+    ...
+    ValueError: Key conflict in inverted mapping
+    """
+    res = dict((v, k) for k, v in map.items())
+    if not len(res) == len(map):
+        raise ValueError('Key conflict in inverted mapping')
+    return res
+
+
+class IdentityOverrideMap(dict):
+    """
+    A dictionary that by default maps each key to itself, but otherwise
+    acts like a normal dictionary.
+
+    >>> d = IdentityOverrideMap()
+    >>> d[42]
+    42
+    >>> d['speed'] = 'speedo'
+    >>> print(d['speed'])
+    speedo
+    """
+
+    def __missing__(self, key):
+        return key
+
+
+class DictStack(list, collections.abc.MutableMapping):
+    """
+    A stack of dictionaries that behaves as a view on those dictionaries,
+    giving preference to the last.
+
+    >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)])
+    >>> stack['a']
+    2
+    >>> stack['b']
+    2
+    >>> stack['c']
+    2
+    >>> len(stack)
+    3
+    >>> stack.push(dict(a=3))
+    >>> stack['a']
+    3
+    >>> stack['a'] = 4
+    >>> set(stack.keys()) == set(['a', 'b', 'c'])
+    True
+    >>> set(stack.items()) == set([('a', 4), ('b', 2), ('c', 2)])
+    True
+    >>> dict(**stack) == dict(stack) == dict(a=4, c=2, b=2)
+    True
+    >>> d = stack.pop()
+    >>> stack['a']
+    2
+    >>> d = stack.pop()
+    >>> stack['a']
+    1
+    >>> stack.get('b', None)
+    >>> 'c' in stack
+    True
+    >>> del stack['c']
+    >>> dict(stack)
+    {'a': 1}
+    """
+
+    def __iter__(self):
+        dicts = list.__iter__(self)
+        return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts)))
+
+    def __getitem__(self, key):
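+        # scan scopes from the most recently pushed dict down to the first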
+        for scope in reversed(tuple(list.__iter__(self))):
+            if key in scope:
+                return scope[key]
+        raise KeyError(key)
+
+    push = list.append
+
+    def __contains__(self, other):
+        return collections.abc.Mapping.__contains__(self, other)
+
+    def __len__(self):
+        return len(list(iter(self)))
+
+    def __setitem__(self, key, item):
+        last = list.__getitem__(self, -1)
+        return last.__setitem__(key, item)
+
+    def __delitem__(self, key):
+        last = list.__getitem__(self, -1)
+        return last.__delitem__(key)
+
+    # workaround for mypy confusion
+    def pop(self, *args, **kwargs):
+        return list.pop(self, *args, **kwargs)
+
+
+class BijectiveMap(dict):
+    """
+    A Bijective Map (two-way mapping).
+
+    Implemented as a simple dictionary of 2x the size, mapping values back
+    to keys.
+
+    Note, this implementation may be incomplete. If there's not a test for
+    your use case below, it's likely to fail, so please test and send pull
+    requests or patches for additional functionality needed.
+
+
+    >>> m = BijectiveMap()
+    >>> m['a'] = 'b'
+    >>> m == {'a': 'b', 'b': 'a'}
+    True
+    >>> print(m['b'])
+    a
+
+    >>> m['c'] = 'd'
+    >>> len(m)
+    2
+
+    Some weird things happen if you map an item to itself or overwrite a
+    single key of a pair, so it's disallowed.
+
+    >>> m['e'] = 'e'
+    Traceback (most recent call last):
+    ValueError: Key cannot map to itself
+
+    >>> m['d'] = 'e'
+    Traceback (most recent call last):
+    ValueError: Key/Value pairs may not overlap
+
+    >>> m['e'] = 'd'
+    Traceback (most recent call last):
+    ValueError: Key/Value pairs may not overlap
+
+    >>> print(m.pop('d'))
+    c
+
+    >>> 'c' in m
+    False
+
+    >>> m = BijectiveMap(dict(a='b'))
+    >>> len(m)
+    1
+    >>> print(m['b'])
+    a
+
+    >>> m = BijectiveMap()
+    >>> m.update(a='b')
+    >>> m['b']
+    'a'
+
+    >>> del m['b']
+    >>> len(m)
+    0
+    >>> 'a' in m
+    False
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__()
+        self.update(*args, **kwargs)
+
+    def __setitem__(self, item, value):
+        if item == value:
+            raise ValueError("Key cannot map to itself")
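+        # 'and' binds tighter than 'or': overlap means either endpoint already
+        # exists but is currently paired with a different partner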
+        overlap = (
+            item in self
+            and self[item] != value
+            or value in self
+            and self[value] != item
+        )
+        if overlap:
+            raise ValueError("Key/Value pairs may not overlap")
+        super().__setitem__(item, value)
+        super().__setitem__(value, item)
+
+    def __delitem__(self, item):
+        self.pop(item)
+
+    def __len__(self):
+        return super().__len__() // 2
+
+    def pop(self, key, *args, **kwargs):
+        mirror = self[key]
+        super().__delitem__(mirror)
+        return super().pop(key, *args, **kwargs)
+
+    def update(self, *args, **kwargs):
+        # build a dictionary using the default constructs
+        d = dict(*args, **kwargs)
+        # build this dictionary using transformed keys.
+        for item in d.items():
+            self.__setitem__(*item)
+
+
+class FrozenDict(collections.abc.Mapping, collections.abc.Hashable):
+    """
+    An immutable mapping.
+
+    >>> a = FrozenDict(a=1, b=2)
+    >>> b = FrozenDict(a=1, b=2)
+    >>> a == b
+    True
+
+    >>> a == dict(a=1, b=2)
+    True
+    >>> dict(a=1, b=2) == a
+    True
+    >>> 'a' in a
+    True
+    >>> type(hash(a)) is type(0)
+    True
+    >>> set(iter(a)) == {'a', 'b'}
+    True
+    >>> len(a)
+    2
+    >>> a['a'] == a.get('a') == 1
+    True
+
+    >>> a['c'] = 3
+    Traceback (most recent call last):
+    ...
+    TypeError: 'FrozenDict' object does not support item assignment
+
+    >>> a.update(y=3)
+    Traceback (most recent call last):
+    ...
+    AttributeError: 'FrozenDict' object has no attribute 'update'
+
+    Copies should compare equal
+
+    >>> copy.copy(a) == a
+    True
+
+    Copies should be the same type
+
+    >>> isinstance(copy.copy(a), FrozenDict)
+    True
+
+    FrozenDict supplies .copy(), even though
+    collections.abc.Mapping doesn't demand it.
+
+    >>> a.copy() == a
+    True
+    >>> a.copy() is not a
+    True
+    """
+
+    __slots__ = ['__data']
+
+    def __new__(cls, *args, **kwargs):
+        self = super().__new__(cls)
+        self.__data = dict(*args, **kwargs)
+        return self
+
+    # Container
+    def __contains__(self, key):
+        return key in self.__data
+
+    # Hashable
+    def __hash__(self):
+        return hash(tuple(sorted(self.__data.items())))
+
+    # Mapping
+    def __iter__(self):
+        return iter(self.__data)
+
+    def __len__(self):
+        return len(self.__data)
+
+    def __getitem__(self, key):
+        return self.__data[key]
+
+    # override get for efficiency provided by dict
+    def get(self, *args, **kwargs):
+        return self.__data.get(*args, **kwargs)
+
+    # override eq to recognize underlying implementation
+    def __eq__(self, other):
+        if isinstance(other, FrozenDict):
+            other = other.__data
+        return self.__data.__eq__(other)
+
+    def copy(self):
+        "Return a shallow copy of self"
+        return copy.copy(self)
+
+
+class Enumeration(ItemsAsAttributes, BijectiveMap):
+    """
+    A convenient way to provide enumerated values
+
+    >>> e = Enumeration('a b c')
+    >>> e['a']
+    0
+
+    >>> e.a
+    0
+
+    >>> e[1]
+    'b'
+
+    >>> set(e.names) == set('abc')
+    True
+
+    >>> set(e.codes) == set(range(3))
+    True
+
+    >>> e.get('d') is None
+    True
+
+    Codes need not start with 0
+
+    >>> e = Enumeration('a b c', range(1, 4))
+    >>> e['a']
+    1
+
+    >>> e[3]
+    'c'
+    """
+
+    def __init__(self, names, codes=None):
+        if isinstance(names, str):
+            names = names.split()
+        if codes is None:
+            codes = itertools.count()
+        super().__init__(zip(names, codes))
+
+    @property
+    def names(self):
+        return (key for key in self if isinstance(key, str))
+
+    @property
+    def codes(self):
+        return (self[name] for name in self.names)
+
+
+class Everything:
+    """
+    A collection "containing" every possible thing.
+
+    >>> 'foo' in Everything()
+    True
+
+    >>> import random
+    >>> random.randint(1, 999) in Everything()
+    True
+
+    >>> random.choice([None, 'foo', 42, ('a', 'b', 'c')]) in Everything()
+    True
+    """
+
+    def __contains__(self, other):
+        return True
+
+
+class InstrumentedDict(collections.UserDict):
+    """
+    Instrument an existing dictionary with additional
+    functionality, but always reference and mutate
+    the original dictionary.
+
+    >>> orig = {'a': 1, 'b': 2}
+    >>> inst = InstrumentedDict(orig)
+    >>> inst['a']
+    1
+    >>> inst['c'] = 3
+    >>> orig['c']
+    3
+    >>> inst.keys() == orig.keys()
+    True
+    """
+
+    def __init__(self, data):
+        super().__init__()
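+        # rebind ``data`` (instead of updating it) so reads and writes flow
+        # through the original mapping; UserDict.__init__ would otherwise
+        # copy the input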
+        self.data = data
+
+
+class Least:
+    """
+    A value that always compares less than any other
+
+    >>> least = Least()
+    >>> 3 < least
+    False
+    >>> 3 > least
+    True
+    >>> least < 3
+    True
+    >>> least <= 3
+    True
+    >>> least > 3
+    False
+    >>> 'x' > least
+    True
+    >>> None > least
+    True
+    """
+
+    def __le__(self, other):
+        return True
+
+    __lt__ = __le__
+
+    def __ge__(self, other):
+        return False
+
+    __gt__ = __ge__
+
+
+class Greatest:
+    """
+    A value that is always greater than any other
+
+    >>> greatest = Greatest()
+    >>> 3 < greatest
+    True
+    >>> 3 > greatest
+    False
+    >>> greatest < 3
+    False
+    >>> greatest > 3
+    True
+    >>> greatest >= 3
+    True
+    >>> 'x' > greatest
+    False
+    >>> None > greatest
+    False
+    """
+
+    def __ge__(self, other):
+        return True
+
+    __gt__ = __ge__
+
+    def __le__(self, other):
+        return False
+
+    __lt__ = __le__
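+
+
+# Hedged usage sketch: ``Least`` and ``Greatest`` act as identity elements
+# when seeding running comparisons, e.g.
+#
+#     smallest = Greatest()
+#     for value in [3, 1, 2]:
+#         if value < smallest:  # True on the first iteration by construction
+#             smallest = value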
+
+
+def pop_all(items):
+    """
+    Clear items in place and return a copy of items.
+
+    >>> items = [1, 2, 3]
+    >>> popped = pop_all(items)
+    >>> popped is items
+    False
+    >>> popped
+    [1, 2, 3]
+    >>> items
+    []
+    """
+    result, items[:] = items[:], []
+    return result
+
+
+class FreezableDefaultDict(collections.defaultdict):
+    """
+    Often it is desirable to prevent the mutation of
+    a default dict after its initial construction, such
+    as to prevent mutation during iteration.
+
+    >>> dd = FreezableDefaultDict(list)
+    >>> dd[0].append('1')
+    >>> dd.freeze()
+    >>> dd[1]
+    []
+    >>> len(dd)
+    1
+    """
+
+    def __missing__(self, key):
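+        # before freeze(): defer to defaultdict's __missing__ (which inserts
+        # the computed default); after freeze(): return a fresh default
+        # without mutating the mapping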
+        return getattr(self, '_frozen', super().__missing__)(key)
+
+    def freeze(self):
+        self._frozen = lambda key: self.default_factory()
+
+
+class Accumulator:
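+    """
+    Accumulate a running total of the values passed in.
+
+    >>> acc = Accumulator()
+    >>> acc(3)
+    3
+    >>> acc(6)
+    9
+    """
+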
+    def __init__(self, initial=0):
+        self.val = initial
+
+    def __call__(self, val):
+        self.val += val
+        return self.val
+
+
+class WeightedLookup(RangeMap):
+    """
+    Given parameters suitable for a dict mapping keys to
+    numeric weights, return a RangeMap representing
+    spans of values proportional to the weights:
+
+    >>> even = WeightedLookup(a=1, b=1)
+
+    [0, 1) -> a
+    [1, 2) -> b
+
+    >>> lk = WeightedLookup(a=1, b=2)
+
+    [0, 1) -> a
+    [1, 3) -> b
+
+    >>> lk[.5]
+    'a'
+    >>> lk[1.5]
+    'b'
+
+    Adds ``.random()`` to select a random weighted value:
+
+    >>> lk.random() in ['a', 'b']
+    True
+
+    >>> choices = [lk.random() for x in range(1000)]
+
+    Statistically speaking, the ratio of 'a' to 'b' choices should be about .5
+    >>> ratio = choices.count('a') / choices.count('b')
+    >>> .4 < ratio < .6
+    True
+    """
+
+    def __init__(self, *args, **kwargs):
+        raw = dict(*args, **kwargs)
+
+        # allocate keys by weight
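+        # e.g. for {a: 1, b: 2} the running totals are 1 and 3, so the
+        # RangeMap (with the ``lt`` comparator) maps [0, 1) -> 'a' and
+        # [1, 3) -> 'b'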
+        indexes = map(Accumulator(), raw.values())
+        super().__init__(zip(indexes, raw.keys()), key_match_comparator=operator.lt)
+
+    def random(self):
+        lower, upper = self.bounds()
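+        # spans start at 0 (weights were accumulated from 0), so a uniform
+        # draw in [0, upper) selects each key proportionally to its weight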
+        selector = random.random() * upper
+        return self[selector]
diff --git a/setuptools/_distutils/_vendor/packaging/py.typed b/setuptools/_vendor/jaraco/collections/py.typed
similarity index 100%
rename from setuptools/_distutils/_vendor/packaging/py.typed
rename to setuptools/_vendor/jaraco/collections/py.typed
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py
index 46c0a231eb..3f7fc17a88 100644
--- a/setuptools/command/easy_install.py
+++ b/setuptools/command/easy_install.py
@@ -34,7 +34,7 @@
 from collections.abc import Iterable
 from glob import glob
 from sysconfig import get_path
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Callable, TypeVar
 
 from jaraco.text import yield_lines
 
@@ -63,7 +63,7 @@
 from setuptools.wheel import Wheel
 
 from .._path import ensure_directory
-from ..compat import py39, py311
+from ..compat import py39, py311, py312
 
 from distutils import dir_util, log
 from distutils.command import install
@@ -89,6 +89,8 @@
     'get_exe_prefixes',
 ]
 
+_T = TypeVar("_T")
+
 
 def is_64bit():
     return struct.calcsize("P") == 8
@@ -588,8 +590,9 @@ def check_pth_processing(self):  # noqa: C901
                 os.unlink(ok_file)
             dirname = os.path.dirname(ok_file)
             os.makedirs(dirname, exist_ok=True)
-            f = open(pth_file, 'w', encoding=py39.LOCALE_ENCODING)
-            # ^-- Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
+            f = open(pth_file, 'w', encoding=py312.PTH_ENCODING)
+            # ^-- Python<3.13 require encoding="locale" instead of "utf-8",
+            #     see python/cpython#77102.
         except OSError:
             self.cant_write_to_target()
         else:
@@ -1280,8 +1283,9 @@ def update_pth(self, dist):  # noqa: C901  # is too complex (11)  # FIXME
         if os.path.islink(filename):
             os.unlink(filename)
 
-        with open(filename, 'wt', encoding=py39.LOCALE_ENCODING) as f:
-            # Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
+        with open(filename, 'wt', encoding=py312.PTH_ENCODING) as f:
+            # ^-- Python<3.13 require encoding="locale" instead of "utf-8",
+            #     see python/cpython#77102.
             f.write(self.pth_file.make_relative(dist.location) + '\n')
 
     def unpack_progress(self, src, dst):
@@ -1507,9 +1511,8 @@ def expand_paths(inputs):  # noqa: C901  # is too complex (11)  # FIXME
                 continue
 
             # Read the .pth file
-            with open(os.path.join(dirname, name), encoding=py39.LOCALE_ENCODING) as f:
-                # Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
-                lines = list(yield_lines(f))
+            content = _read_pth(os.path.join(dirname, name))
+            lines = list(yield_lines(content))
 
             # Yield existing non-dupe, non-import directory lines from it
             for line in lines:
@@ -1623,9 +1626,8 @@ def _load_raw(self):
         paths = []
         dirty = saw_import = False
         seen = set(self.sitedirs)
-        f = open(self.filename, 'rt', encoding=py39.LOCALE_ENCODING)
-        # ^-- Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
-        for line in f:
+        content = _read_pth(self.filename)
+        for line in content.splitlines():
             path = line.rstrip()
             # still keep imports and empty/commented lines for formatting
             paths.append(path)
@@ -1644,7 +1646,6 @@ def _load_raw(self):
                 paths.pop()
                 continue
             seen.add(normalized_path)
-        f.close()
         # remove any trailing empty/blank line
         while paths and not paths[-1].strip():
             paths.pop()
@@ -1695,8 +1696,9 @@ def save(self):
             data = '\n'.join(lines) + '\n'
             if os.path.islink(self.filename):
                 os.unlink(self.filename)
-            with open(self.filename, 'wt', encoding=py39.LOCALE_ENCODING) as f:
-                # Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
+            with open(self.filename, 'wt', encoding=py312.PTH_ENCODING) as f:
+                # ^-- Python<3.13 require encoding="locale" instead of "utf-8",
+                #     see python/cpython#77102.
                 f.write(data)
         elif os.path.exists(self.filename):
             log.debug("Deleting empty %s", self.filename)
@@ -1786,13 +1788,14 @@ def _first_line_re():
     return re.compile(first_line_re.pattern.decode())
 
 
-def auto_chmod(func, arg, exc):
+# Must match shutil._OnExcCallback
+def auto_chmod(func: Callable[..., _T], arg: str, exc: BaseException) -> _T:
+    """shutils onexc callback to automatically call chmod for certain functions."""
+    # Only retry for scenarios known to have an issue
     if func in [os.unlink, os.remove] and os.name == 'nt':
         chmod(arg, stat.S_IWRITE)
         return func(arg)
-    et, ev, _ = sys.exc_info()
-    # TODO: This code doesn't make sense. What is it trying to do?
-    raise (ev[0], ev[1] + (" %s %s" % (func, arg)))  # pyright: ignore[reportOptionalSubscript, reportIndexIssue]
+    raise exc
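+
+# Hedged note: this signature matches the ``onexc`` callback accepted by
+# ``shutil.rmtree(path, onexc=...)`` on Python 3.12+, which passes the
+# exception instance itself rather than ``sys.exc_info()`` (the older
+# ``onerror`` form).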
 
 
 def update_dist_caches(dist_path, fix_zipimporter_caches):
@@ -2070,8 +2073,7 @@ def from_param(cls, param: Self | str | Iterable[str] | None) -> Self:
             return cls(param)
         if param is None:
             return cls.from_environment()
-        # AttributeError to keep backwards compatibility, this should really be a TypeError though
-        raise AttributeError(f"Argument has an unsupported type {type(param)}")
+        raise TypeError(f"Argument has an unsupported type {type(param)}")
 
     @classmethod
     def from_environment(cls):
@@ -2355,6 +2357,26 @@ def only_strs(values):
     return filter(lambda val: isinstance(val, str), values)
 
 
+def _read_pth(fullname: str) -> str:
+    # Python<3.13 require encoding="locale" instead of "utf-8", see python/cpython#77102
+    # In the case old versions of setuptools are producing `pth` files with
+    # different encodings that might be problematic... So we fallback to "locale".
+
+    try:
+        with open(fullname, encoding=py312.PTH_ENCODING) as f:
+            return f.read()
+    except UnicodeDecodeError:  # pragma: no cover
+        # This error can only happen when PTH_ENCODING is "utf-8"
+        # (i.e. Python >= 3.12.4).
+        # TODO: Possible deprecation warnings to be added in the future:
+        #       ``.pth file {fullname!r} is not UTF-8.``
+        #       Your environment contains {fullname!r}, which cannot be read as UTF-8.
+        #       This is likely to have been produced by an old version of setuptools.
+        #       Please be mindful that this is deprecated and that, in the future,
+        #       non-UTF-8 .pth files may cause setuptools to fail.
+        with open(fullname, encoding=py39.LOCALE_ENCODING) as f:
+            return f.read()
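+
+# Hedged illustration (hypothetical file): a legacy, non-UTF-8 ``.pth`` file
+# exercises the fallback above, e.g.
+#
+#     pathlib.Path("legacy.pth").write_bytes("caf\xe9".encode("latin-1"))
+#     _read_pth("legacy.pth")  # UTF-8 decoding fails; the "locale" fallback
+#                              # succeeds when the locale encoding matches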
+
+
 class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning):
     _SUMMARY = "easy_install command is deprecated."
     _DETAILS = """
diff --git a/setuptools/command/editable_wheel.py b/setuptools/command/editable_wheel.py
index 4a8b3abb43..2b21eacbad 100644
--- a/setuptools/command/editable_wheel.py
+++ b/setuptools/command/editable_wheel.py
@@ -28,7 +28,7 @@
 
 from .. import Command, _normalization, _path, errors, namespaces
 from .._path import StrPath
-from ..compat import py39
+from ..compat import py312
 from ..discovery import find_package_path
 from ..dist import Distribution
 from ..warnings import InformationOnly, SetuptoolsDeprecationWarning, SetuptoolsWarning
@@ -561,7 +561,9 @@ def __exit__(
 
 
 def _encode_pth(content: str) -> bytes:
-    """.pth files are always read with 'locale' encoding, the recommendation
+    """
+    Prior to Python 3.13 (see https://github.com/python/cpython/issues/77102),
+    .pth files are always read with 'locale' encoding; the recommendation
     from the cpython core developers is to write them as ``open(path, "w")``
     and ignore warnings (see python/cpython#77102, pypa/setuptools#3937).
     This function tries to simulate this behaviour without having to create an
@@ -571,7 +573,8 @@ def _encode_pth(content: str) -> bytes:
     or ``locale.getencoding()``).
     """
     with io.BytesIO() as buffer:
-        wrapper = io.TextIOWrapper(buffer, encoding=py39.LOCALE_ENCODING)
+        wrapper = io.TextIOWrapper(buffer, encoding=py312.PTH_ENCODING)
+        # TODO: Once Python 3.13 is the minimum, replace this whole function with `bytes(content, "utf-8")`
         wrapper.write(content)
         wrapper.flush()
         buffer.seek(0)
diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py
index 09255a3240..280eb5e807 100644
--- a/setuptools/command/egg_info.py
+++ b/setuptools/command/egg_info.py
@@ -606,16 +606,6 @@ def _add_referenced_files(self):
             log.debug("adding file referenced by config '%s'", rf)
         self.filelist.extend(referenced)
 
-    def prune_file_list(self):
-        build = self.get_finalized_command('build')
-        base_dir = self.distribution.get_fullname()
-        self.filelist.prune(build.build_base)
-        self.filelist.prune(base_dir)
-        sep = re.escape(os.sep)
-        self.filelist.exclude_pattern(
-            r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=True
-        )
-
     def _safe_data_files(self, build_py):
         """
         The parent class implementation of this method
diff --git a/setuptools/command/register.py b/setuptools/command/register.py
deleted file mode 100644
index 93ef04aa0e..0000000000
--- a/setuptools/command/register.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from setuptools.errors import RemovedCommandError
-
-from ..dist import Distribution
-
-import distutils.command.register as orig
-from distutils import log
-
-
-class register(orig.register):
-    """Formerly used to register packages on PyPI."""
-
-    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
-
-    def run(self):
-        msg = (
-            "The register command has been removed, use twine to upload "
-            "instead (https://pypi.org/p/twine)"
-        )
-
-        self.announce("ERROR: " + msg, log.ERROR)
-
-        raise RemovedCommandError(msg)
diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py
index 68afab89b4..fa9a2c4d81 100644
--- a/setuptools/command/sdist.py
+++ b/setuptools/command/sdist.py
@@ -2,6 +2,7 @@
 
 import contextlib
 import os
+import re
 from itertools import chain
 
 from .._importlib import metadata
@@ -156,6 +157,12 @@ def _add_defaults_data_files(self):
         except TypeError:
             log.warn("data_files contains unexpected objects")
 
+    def prune_file_list(self):
+        super().prune_file_list()
+        # Prevent accidental inclusion of test-environment and virtualenv dirs at the project root
+        sep = re.escape(os.sep)
+        self.filelist.exclude_pattern(r"^(\.tox|\.nox|\.venv)" + sep, is_regex=True)
+
     def check_readme(self):
         for f in self.READMES:
             if os.path.exists(f):
diff --git a/setuptools/command/upload.py b/setuptools/command/upload.py
deleted file mode 100644
index 649b41fa30..0000000000
--- a/setuptools/command/upload.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from setuptools.dist import Distribution
-from setuptools.errors import RemovedCommandError
-
-from distutils import log
-from distutils.command import upload as orig
-
-
-class upload(orig.upload):
-    """Formerly used to upload packages to PyPI."""
-
-    distribution: Distribution  # override distutils.dist.Distribution with setuptools.dist.Distribution
-
-    def run(self):
-        msg = (
-            "The upload command has been removed, use twine to upload "
-            "instead (https://pypi.org/p/twine)"
-        )
-
-        self.announce("ERROR: " + msg, log.ERROR)
-        raise RemovedCommandError(msg)
diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py
deleted file mode 100644
index 3c2946cfc8..0000000000
--- a/setuptools/command/upload_docs.py
+++ /dev/null
@@ -1,221 +0,0 @@
-"""upload_docs
-
-Implements a Distutils 'upload_docs' subcommand (upload documentation to
-sites other than PyPi such as devpi).
-"""
-
-import functools
-import http.client
-import itertools
-import os
-import shutil
-import tempfile
-import urllib.parse
-import zipfile
-from base64 import standard_b64encode
-
-from .._importlib import metadata
-from ..warnings import SetuptoolsDeprecationWarning
-from .upload import upload
-
-from distutils import log
-from distutils.errors import DistutilsOptionError
-
-
-def _encode(s):
-    return s.encode('utf-8', 'surrogateescape')
-
-
-class upload_docs(upload):
-    # override the default repository as upload_docs isn't
-    # supported by Warehouse (and won't be).
-    DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/'
-
-    description = 'Upload documentation to sites other than PyPi such as devpi'
-
-    user_options = [
-        (
-            'repository=',
-            'r',
-            "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY,
-        ),
-        ('show-response', None, 'display full response text from server'),
-        ('upload-dir=', None, 'directory to upload'),
-    ]
-    boolean_options = upload.boolean_options
-
-    def has_sphinx(self):
-        return bool(
-            self.upload_dir is None
-            and metadata.entry_points(group='distutils.commands', name='build_sphinx')
-        )
-
-    sub_commands = [('build_sphinx', has_sphinx)]
-
-    def initialize_options(self):
-        upload.initialize_options(self)
-        self.upload_dir = None
-        self.target_dir = None
-
-    def finalize_options(self):
-        log.warn(
-            "Upload_docs command is deprecated. Use Read the Docs "
-            "(https://readthedocs.org) instead."
-        )
-        upload.finalize_options(self)
-        if self.upload_dir is None:
-            if self.has_sphinx():
-                build_sphinx = self.get_finalized_command('build_sphinx')
-                self.target_dir = dict(build_sphinx.builder_target_dirs)['html']
-            else:
-                build = self.get_finalized_command('build')
-                self.target_dir = os.path.join(build.build_base, 'docs')
-        else:
-            self.ensure_dirname('upload_dir')
-            self.target_dir = self.upload_dir
-        self.announce('Using upload directory %s' % self.target_dir)
-
-    def create_zipfile(self, filename):
-        zip_file = zipfile.ZipFile(filename, "w")
-        try:
-            self.mkpath(self.target_dir)  # just in case
-            for root, dirs, files in os.walk(self.target_dir):
-                if root == self.target_dir and not files:
-                    tmpl = "no files found in upload directory '%s'"
-                    raise DistutilsOptionError(tmpl % self.target_dir)
-                for name in files:
-                    full = os.path.join(root, name)
-                    relative = root[len(self.target_dir) :].lstrip(os.path.sep)
-                    dest = os.path.join(relative, name)
-                    zip_file.write(full, dest)
-        finally:
-            zip_file.close()
-
-    def run(self):
-        SetuptoolsDeprecationWarning.emit(
-            "Deprecated command",
-            """
-            upload_docs is deprecated and will be removed in a future version.
-            Instead, use tools like devpi and Read the Docs; or lower level tools like
-            httpie and curl to interact directly with your hosting service API.
-            """,
-            due_date=(2023, 9, 26),  # warning introduced in 27 Jul 2022
-        )
-
-        # Run sub commands
-        for cmd_name in self.get_sub_commands():
-            self.run_command(cmd_name)
-
-        tmp_dir = tempfile.mkdtemp()
-        name = self.distribution.metadata.get_name()
-        zip_file = os.path.join(tmp_dir, "%s.zip" % name)
-        try:
-            self.create_zipfile(zip_file)
-            self.upload_file(zip_file)
-        finally:
-            shutil.rmtree(tmp_dir)
-
-    @staticmethod
-    def _build_part(item, sep_boundary):
-        key, values = item
-        title = '\nContent-Disposition: form-data; name="%s"' % key
-        # handle multiple entries for the same name
-        if not isinstance(values, list):
-            values = [values]
-        for value in values:
-            if isinstance(value, tuple):
-                title += '; filename="%s"' % value[0]
-                value = value[1]
-            else:
-                value = _encode(value)
-            yield sep_boundary
-            yield _encode(title)
-            yield b"\n\n"
-            yield value
-            if value and value[-1:] == b'\r':
-                yield b'\n'  # write an extra newline (lurve Macs)
-
-    @classmethod
-    def _build_multipart(cls, data):
-        """
-        Build up the MIME payload for the POST data
-        """
-        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
-        sep_boundary = b'\n--' + boundary.encode('ascii')
-        end_boundary = sep_boundary + b'--'
-        end_items = (
-            end_boundary,
-            b"\n",
-        )
-        builder = functools.partial(
-            cls._build_part,
-            sep_boundary=sep_boundary,
-        )
-        part_groups = map(builder, data.items())
-        parts = itertools.chain.from_iterable(part_groups)
-        body_items = itertools.chain(parts, end_items)
-        content_type = 'multipart/form-data; boundary=%s' % boundary
-        return b''.join(body_items), content_type
-
-    def upload_file(self, filename):
-        with open(filename, 'rb') as f:
-            content = f.read()
-        meta = self.distribution.metadata
-        data = {
-            ':action': 'doc_upload',
-            'name': meta.get_name(),
-            'content': (os.path.basename(filename), content),
-        }
-        # set up the authentication
-        credentials = _encode(self.username + ':' + self.password)
-        credentials = standard_b64encode(credentials).decode('ascii')
-        auth = "Basic " + credentials
-
-        body, ct = self._build_multipart(data)
-
-        msg = "Submitting documentation to %s" % (self.repository)
-        self.announce(msg, log.INFO)
-
-        # build the Request
-        # We can't use urllib2 since we need to send the Basic
-        # auth right with the first request
-        schema, netloc, url, params, query, fragments = urllib.parse.urlparse(
-            self.repository
-        )
-        assert not params and not query and not fragments
-        if schema == 'http':
-            conn = http.client.HTTPConnection(netloc)
-        elif schema == 'https':
-            conn = http.client.HTTPSConnection(netloc)
-        else:
-            raise AssertionError("unsupported schema " + schema)
-
-        data = ''
-        try:
-            conn.connect()
-            conn.putrequest("POST", url)
-            content_type = ct
-            conn.putheader('Content-type', content_type)
-            conn.putheader('Content-length', str(len(body)))
-            conn.putheader('Authorization', auth)
-            conn.endheaders()
-            conn.send(body)
-        except OSError as e:
-            self.announce(str(e), log.ERROR)
-            return
-
-        r = conn.getresponse()
-        if r.status == 200:
-            msg = 'Server response (%s): %s' % (r.status, r.reason)
-            self.announce(msg, log.INFO)
-        elif r.status == 301:
-            location = r.getheader('Location')
-            if location is None:
-                location = 'https://pythonhosted.org/%s/' % meta.get_name()
-            msg = 'Upload successful. Visit %s' % location
-            self.announce(msg, log.INFO)
-        else:
-            msg = 'Upload failed (%s): %s' % (r.status, r.reason)
-            self.announce(msg, log.ERROR)
-        if self.show_response:
-            print('-' * 75, r.read(), '-' * 75)
diff --git a/setuptools/compat/py312.py b/setuptools/compat/py312.py
new file mode 100644
index 0000000000..b20c5f697a
--- /dev/null
+++ b/setuptools/compat/py312.py
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+import sys
+
+if sys.version_info >= (3, 12, 4):
+    # Python >= 3.12.4 (and hence 3.13) should support `.pth` files encoded
+    # in UTF-8.
+    # See discussion in https://github.com/python/cpython/issues/77102
+    PTH_ENCODING: str | None = "utf-8"
+else:
+    from .py39 import LOCALE_ENCODING
+
+    # PTH_ENCODING = "locale"
+    PTH_ENCODING = LOCALE_ENCODING
diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py
index 7b9c0b1a59..23179f3548 100644
--- a/setuptools/config/_apply_pyprojecttoml.py
+++ b/setuptools/config/_apply_pyprojecttoml.py
@@ -17,10 +17,11 @@
 from inspect import cleandoc
 from itertools import chain
 from types import MappingProxyType
-from typing import TYPE_CHECKING, Any, Callable, Dict, Mapping, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, Mapping, TypeVar, Union
 
 from .._path import StrPath
 from ..errors import RemovedConfigError
+from ..extension import Extension
 from ..warnings import SetuptoolsWarning
 
 if TYPE_CHECKING:
@@ -35,6 +36,7 @@
 _ProjectReadmeValue: TypeAlias = Union[str, Dict[str, str]]
 _CorrespFn: TypeAlias = Callable[["Distribution", Any, StrPath], None]
 _Correspondence: TypeAlias = Union[str, _CorrespFn]
+_T = TypeVar("_T")
 
 _logger = logging.getLogger(__name__)
 
@@ -117,13 +119,14 @@ def json_compatible_key(key: str) -> str:
 
 
 def _set_config(dist: Distribution, field: str, value: Any):
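+    # give selected fields a chance to be pre-processed into their final
+    # representation (see _PREPROCESS below); fields without a registered
+    # hook pass through _noop unchanged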
+    val = _PREPROCESS.get(field, _noop)(dist, value)
     setter = getattr(dist.metadata, f"set_{field}", None)
     if setter:
-        setter(value)
+        setter(val)
     elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES:
-        setattr(dist.metadata, field, value)
+        setattr(dist.metadata, field, val)
     else:
-        setattr(dist, field, value)
+        setattr(dist, field, val)
 
 
 _CONTENT_TYPES = {
@@ -218,6 +221,17 @@ def _optional_dependencies(dist: Distribution, val: dict, _root_dir):
     dist.extras_require = {**existing, **val}
 
 
+def _ext_modules(dist: Distribution, val: list[dict]) -> list[Extension]:
+    existing = dist.ext_modules or []
+    args = ({k.replace("-", "_"): v for k, v in x.items()} for x in val)
+    new = [Extension(**kw) for kw in args]
+    return [*existing, *new]
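+
+# Hedged sketch of the EXPERIMENTAL ``pyproject.toml`` input consumed above
+# (``mypkg._speedups`` is hypothetical); dashed keys are normalized to
+# underscores before being passed to ``Extension(**kw)``::
+#
+#     [[tool.setuptools.ext-modules]]
+#     name = "mypkg._speedups"
+#     sources = ["src/speedups.c"]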
+
+
+def _noop(_dist: Distribution, val: _T) -> _T:
+    return val
+
+
 def _unify_entry_points(project_table: dict):
     project = project_table
     entry_points = project.pop("entry-points", project.pop("entry_points", {}))
@@ -376,6 +390,10 @@ def _acessor(obj):
     "license_files",
 }
 
+_PREPROCESS = {
+    "ext_modules": _ext_modules,
+}
+
 _PREVIOUSLY_DEFINED = {
     "name": _attrgetter("metadata.name"),
     "version": _attrgetter("metadata.version"),
diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
index 7e403af4b7..42e7aa5e33 100644
--- a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
+++ b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
@@ -31,7 +31,7 @@ def validate(data, custom_formats={}, name_prefix=None):
 
 def validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. 
They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 
'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. 
They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 
'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$ref': '#/definitions/ext-module'}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', 
'$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
         data_keys = set(data.keys())
@@ -86,7 +86,7 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_bui
             data_keys.remove("tool")
             data__tool = data["tool"]
             if not isinstance(data__tool, dict):
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either 
as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = 
"pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type')
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either 
as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$ref': '#/definitions/ext-module'}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should 
be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 
'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type')
             data__tool_is_dict = isinstance(data__tool, dict)
             if data__tool_is_dict:
                 data__tool_keys = set(data__tool.keys())
@@ -99,12 +99,12 @@ def validate_https___packaging_python_org_en_latest_specifications_declaring_bui
                     data__tool__setuptools = data__tool["setuptools"]
                     validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html(data__tool__setuptools, custom_formats, (name_prefix or "data") + ".tool.setuptools")
         if data_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. 
They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 
'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in  :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html', 'title': '``tool.distutils`` table', '$$description': ['**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``', 'subtables to configure arguments for ``distutils`` commands.', 'Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` commands via `distutils configuration files', '`_.', 'See also `the old Python docs _`.'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. 
They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$ref': '#/definitions/package-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$ref': '#/definitions/package-name'}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 
'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$ref': '#/definitions/ext-module'}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive-for-dependencies'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive-for-dependencies'}}}, 'readme': {'type': 'object', 'anyOf': [{'$ref': '#/definitions/file-directive'}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'$ref': '#/definitions/file-directive/properties/file'}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', 
'$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://packaging.python.org/en/latest/specifications/pyproject-toml/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '`_.']}, 'license': {'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier `_.'}, '$$description': ['`Trove classifiers `_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points `_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points `_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': 
{'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', '    If the core metadata specification lists a field as "Required", then', '    the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', '    The required fields are: Metadata-Version, Name, Version.', '    All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://peps.python.org/pep-0621/#authors-maintainers', 'type': 'object', 'additionalProperties': False, 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '`_', 'and `setuptools docs', '`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
     return data
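
The ``+`` hunk above introduces an ``ext-modules`` property and a matching
``#/definitions/ext-module`` subschema, which the schema text itself describes as
"Parameters to construct a :class:`setuptools.Extension` object" (required keys:
``name`` and ``sources``). A minimal sketch of that mapping, with hypothetical
module and file names, using only documented ``setuptools.Extension`` keyword
arguments (each hyphenated schema key corresponds to the underscored argument):

    from setuptools import Extension

    ext = Extension(
        name="mypkg._speedups",            # schema key "name" (required); hypothetical module
        sources=["src/_speedups.c"],       # schema key "sources" (required); hypothetical file
        include_dirs=["include"],          # schema key "include-dirs"
        define_macros=[("NDEBUG", None)],  # schema key "define-macros": [name, value-or-null] pairs
        py_limited_api=True,               # schema key "py-limited-api"
    )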
 
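For completeness, a sketch (not part of this changeset) of driving the per-table
validator defined just below. Three assumptions here: the import path of the
generated module, that the regenerated (``+``) counterpart of this function,
truncated in this hunk, mirrors the top-level schema's new ``ext-modules``
property, and that named string formats are resolved through the
``custom_formats`` mapping, so a permissive stub is supplied where real callers
would pass genuine format checkers.

    import collections

    # Assumed location of the generated validators inside the setuptools tree.
    from setuptools.config._validate_pyproject.fastjsonschema_validations import (
        validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html
        as validate_tool_setuptools,
    )

    # Accept any value for custom formats (e.g. "python-module-name-relaxed").
    accept_any_format = collections.defaultdict(lambda: lambda _value: True)

    table = {  # a ``[tool.setuptools]`` table exercising the new field
        "ext-modules": [{"name": "mypkg._speedups", "sources": ["src/_speedups.c"]}],
    }
    validate_tool_setuptools(table, custom_formats=accept_any_format)
    # Raises JsonSchemaValueException (see the hunks above) when the table
    # violates the schema, e.g. if a required "sources" entry is missing.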
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
-        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 
'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 
'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string 
per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type')
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 
'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 
'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` 
object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type')
     data_is_dict = isinstance(data, dict)
     if data_is_dict:
         data_keys = set(data.keys())
@@ -366,6 +366,16 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                     if isinstance(data__pymodules_item, str):
                         if not custom_formats["python-module-name-relaxed"](data__pymodules_item):
                             raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be python-module-name-relaxed", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='format')
+        if "ext-modules" in data_keys:
+            data_keys.remove("ext-modules")
+            data__extmodules = data["ext-modules"]
+            if not isinstance(data__extmodules, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".ext-modules must be array", value=data__extmodules, name="" + (name_prefix or "data") + ".ext-modules", definition={'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}}, rule='type')
+            data__extmodules_is_list = isinstance(data__extmodules, (list, tuple))
+            if data__extmodules_is_list:
+                data__extmodules_len = len(data__extmodules)
+                for data__extmodules_x, data__extmodules_item in enumerate(data__extmodules):
+                    validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_ext_module(data__extmodules_item, custom_formats, (name_prefix or "data") + ".ext-modules[{data__extmodules_x}]".format(**locals()))
         if "data-files" in data_keys:
             data_keys.remove("data-files")
             data__datafiles = data["data-files"]
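The new ``ext-modules`` branch added in the hunk above follows the pattern of the surrounding generated code: pop the key from the set of unconsumed properties, reject non-array values with the inlined definition in the error message, then delegate each entry to the generated per-item validator. A hedged sketch of a payload that should pass this branch, written as the Python dict the validator receives after TOML parsing (the module and file names are made up for illustration; only ``name`` and ``sources`` are required per entry):

    # Hypothetical [tool.setuptools] payload, post-TOML parsing.
    # "define-macros" entries are [name, value-or-None] pairs per the schema.
    data = {
        "ext-modules": [
            {
                "name": "mypkg._speedups",             # hypothetical module
                "sources": ["src/mypkg/_speedups.c"],  # hypothetical source
                "define-macros": [["NDEBUG", None]],
                "py-limited-api": True,
            }
        ]
    }
    # Each item would then be routed to
    # validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_ext_module
    # with name_prefix "data.ext-modules[0]".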
@@ -524,7 +534,7 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
                 if data__dynamic_keys:
                     raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must not contain "+str(data__dynamic_keys)+" properties", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 
'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}, rule='additionalProperties')
         if data_keys:
-            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': 
"'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might 
not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', 
'(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties')
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema#', '$id': 'https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html', 'title': '``tool.setuptools`` table', '$$description': ['``setuptools``-specific configurations that can be set by users that require', 'customization.', 'These configurations are completely optional and probably can be skipped when', 'creating simple packages. They are equivalent to some of the `Keywords', '`_', 'used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.', 'It considers only ``setuptools`` `parameters', '`_', 'that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``', 'and ``setup_requires`` (incompatible with modern workflows/standards).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'$$description': ['Whether the project can be safely installed and run from a zip file.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'boolean'}, 'script-files': {'$$description': ['Legacy way of defining scripts (entry-points are preferred).', 'Equivalent to the ``script`` keyword in ``setup.py``', '(it was renamed to avoid confusion with entry-point based ``project.scripts``', 'defined in :pep:`621`).', '**DISCOURAGED**: generic script wrappers are tricky and may not work properly.', 'Whenever possible, please use ``project.scripts`` instead.'], 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.', '**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and', '``setup.py install`` in the context of ``eggs`` (**DEPRECATED**).'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}}, {'$id': '#/definitions/find-directive', 'title': 
"'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'const': ''}, {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'anyOf': [{'type': 'string', 'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html', 'description': '**DEPRECATED**: use implicit namespaces instead (:pep:`420`).'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name-relaxed'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'ext-modules': {'description': 'Extension modules to be compiled by setuptools', 'type': 'array', 'items': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a 
:class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}}, 'data-files': {'$$description': ['``dict``-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', '**DISCOURAGED**: please notice this might not work as expected with wheels.', 'Whenever possible, consider using data files inside the package directories', '(or create a new namespace package that only contains data files).', 'See `data files support', '`_.'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', '    cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['**PROVISIONAL**: list of glob patterns for all license files being distributed.', '(likely to become standard with :pep:`639`).', "By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.', 'Also ensure to set ``project.dynamic`` accordingly.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'type': 'string', 'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$ref': '#/definitions/file-directive'}]}}}, 'readme': {'type': 'object', 'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'type': 'object', 'properties': {'content-type': {'type': 'string'}, 'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'additionalProperties': False}], 'required': ['file']}}}}, 'definitions': {'package-name': {'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, 'ext-module': {'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` 
object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, 'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'file-directive-for-dependencies': {'title': "'file:' directive for dependencies", 'allOf': [{'$$description': ['**BETA**: subset of the ``requirements.txt`` format', 'without ``pip`` flags and options', '(one :pep:`508`-compliant string per line,', 'lines that are blank or start with ``#`` are excluded).', 'See `dynamic metadata', '`_.']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties')
     return data
 
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_file_directive_properties_file(data, custom_formats={}, name_prefix=None):
@@ -613,6 +623,211 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
             raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string', 'format': 'python-qualified-identifier'}}, 'required': ['attr']}, rule='additionalProperties')
     return data
 
+def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_ext_module(data, custom_formats={}, name_prefix=None):
+    if not isinstance(data, (dict)):
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, rule='type')
+    data_is_dict = isinstance(data, dict)
+    if data_is_dict:
+        data__missing_keys = set(['name', 'sources']) - data.keys()
+        if data__missing_keys:
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain " + (str(sorted(data__missing_keys)) + " properties"), value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, rule='required')
+        data_keys = set(data.keys())
+        if "name" in data_keys:
+            data_keys.remove("name")
+            data__name = data["name"]
+            if not isinstance(data__name, (str)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='type')
+            if isinstance(data__name, str):
+                if not custom_formats["python-module-name-relaxed"](data__name):
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be python-module-name-relaxed", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='format')
+        if "sources" in data_keys:
+            data_keys.remove("sources")
+            data__sources = data["sources"]
+            if not isinstance(data__sources, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".sources must be array", value=data__sources, name="" + (name_prefix or "data") + ".sources", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__sources_is_list = isinstance(data__sources, (list, tuple))
+            if data__sources_is_list:
+                data__sources_len = len(data__sources)
+                for data__sources_x, data__sources_item in enumerate(data__sources):
+                    if not isinstance(data__sources_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".sources[{data__sources_x}]".format(**locals()) + " must be string", value=data__sources_item, name="" + (name_prefix or "data") + ".sources[{data__sources_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "include-dirs" in data_keys:
+            data_keys.remove("include-dirs")
+            data__includedirs = data["include-dirs"]
+            if not isinstance(data__includedirs, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".include-dirs must be array", value=data__includedirs, name="" + (name_prefix or "data") + ".include-dirs", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__includedirs_is_list = isinstance(data__includedirs, (list, tuple))
+            if data__includedirs_is_list:
+                data__includedirs_len = len(data__includedirs)
+                for data__includedirs_x, data__includedirs_item in enumerate(data__includedirs):
+                    if not isinstance(data__includedirs_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".include-dirs[{data__includedirs_x}]".format(**locals()) + " must be string", value=data__includedirs_item, name="" + (name_prefix or "data") + ".include-dirs[{data__includedirs_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "define-macros" in data_keys:
+            data_keys.remove("define-macros")
+            data__definemacros = data["define-macros"]
+            if not isinstance(data__definemacros, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros must be array", value=data__definemacros, name="" + (name_prefix or "data") + ".define-macros", definition={'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, rule='type')
+            data__definemacros_is_list = isinstance(data__definemacros, (list, tuple))
+            if data__definemacros_is_list:
+                data__definemacros_len = len(data__definemacros)
+                for data__definemacros_x, data__definemacros_item in enumerate(data__definemacros):
+                    if not isinstance(data__definemacros_item, (list, tuple)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}]".format(**locals()) + " must be array", value=data__definemacros_item, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}]".format(**locals()) + "", definition={'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}, rule='type')
+                    data__definemacros_item_is_list = isinstance(data__definemacros_item, (list, tuple))
+                    if data__definemacros_item_is_list:
+                        data__definemacros_item_len = len(data__definemacros_item)
+                        if data__definemacros_item_len > 0:
+                            data__definemacros_item__0 = data__definemacros_item[0]
+                            if not isinstance(data__definemacros_item__0, (str)):
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][0]".format(**locals()) + " must be string", value=data__definemacros_item__0, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][0]".format(**locals()) + "", definition={'description': 'macro name', 'type': 'string'}, rule='type')
+                        if data__definemacros_item_len > 1:
+                            data__definemacros_item__1 = data__definemacros_item[1]
+                            data__definemacros_item__1_one_of_count9 = 0
+                            if data__definemacros_item__1_one_of_count9 < 2:
+                                try:
+                                    if not isinstance(data__definemacros_item__1, (str)):
+                                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + " must be string", value=data__definemacros_item__1, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+                                    data__definemacros_item__1_one_of_count9 += 1
+                                except JsonSchemaValueException: pass
+                            if data__definemacros_item__1_one_of_count9 < 2:
+                                try:
+                                    if not isinstance(data__definemacros_item__1, (NoneType)):
+                                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + " must be null", value=data__definemacros_item__1, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + "", definition={'type': 'null'}, rule='type')
+                                    data__definemacros_item__1_one_of_count9 += 1
+                                except JsonSchemaValueException: pass
+                            if data__definemacros_item__1_one_of_count9 != 1:
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + " must be valid exactly by one definition" + (" (" + str(data__definemacros_item__1_one_of_count9) + " matches found)"), value=data__definemacros_item__1, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}][1]".format(**locals()) + "", definition={'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}, rule='oneOf')
+                        if data__definemacros_item_len > 2:
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}]".format(**locals()) + " must contain only specified items", value=data__definemacros_item, name="" + (name_prefix or "data") + ".define-macros[{data__definemacros_x}]".format(**locals()) + "", definition={'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}, rule='items')
+        if "undef-macros" in data_keys:
+            data_keys.remove("undef-macros")
+            data__undefmacros = data["undef-macros"]
+            if not isinstance(data__undefmacros, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".undef-macros must be array", value=data__undefmacros, name="" + (name_prefix or "data") + ".undef-macros", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__undefmacros_is_list = isinstance(data__undefmacros, (list, tuple))
+            if data__undefmacros_is_list:
+                data__undefmacros_len = len(data__undefmacros)
+                for data__undefmacros_x, data__undefmacros_item in enumerate(data__undefmacros):
+                    if not isinstance(data__undefmacros_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".undef-macros[{data__undefmacros_x}]".format(**locals()) + " must be string", value=data__undefmacros_item, name="" + (name_prefix or "data") + ".undef-macros[{data__undefmacros_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "library-dirs" in data_keys:
+            data_keys.remove("library-dirs")
+            data__librarydirs = data["library-dirs"]
+            if not isinstance(data__librarydirs, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".library-dirs must be array", value=data__librarydirs, name="" + (name_prefix or "data") + ".library-dirs", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__librarydirs_is_list = isinstance(data__librarydirs, (list, tuple))
+            if data__librarydirs_is_list:
+                data__librarydirs_len = len(data__librarydirs)
+                for data__librarydirs_x, data__librarydirs_item in enumerate(data__librarydirs):
+                    if not isinstance(data__librarydirs_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".library-dirs[{data__librarydirs_x}]".format(**locals()) + " must be string", value=data__librarydirs_item, name="" + (name_prefix or "data") + ".library-dirs[{data__librarydirs_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "libraries" in data_keys:
+            data_keys.remove("libraries")
+            data__libraries = data["libraries"]
+            if not isinstance(data__libraries, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".libraries must be array", value=data__libraries, name="" + (name_prefix or "data") + ".libraries", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__libraries_is_list = isinstance(data__libraries, (list, tuple))
+            if data__libraries_is_list:
+                data__libraries_len = len(data__libraries)
+                for data__libraries_x, data__libraries_item in enumerate(data__libraries):
+                    if not isinstance(data__libraries_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".libraries[{data__libraries_x}]".format(**locals()) + " must be string", value=data__libraries_item, name="" + (name_prefix or "data") + ".libraries[{data__libraries_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "runtime-library-dirs" in data_keys:
+            data_keys.remove("runtime-library-dirs")
+            data__runtimelibrarydirs = data["runtime-library-dirs"]
+            if not isinstance(data__runtimelibrarydirs, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime-library-dirs must be array", value=data__runtimelibrarydirs, name="" + (name_prefix or "data") + ".runtime-library-dirs", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__runtimelibrarydirs_is_list = isinstance(data__runtimelibrarydirs, (list, tuple))
+            if data__runtimelibrarydirs_is_list:
+                data__runtimelibrarydirs_len = len(data__runtimelibrarydirs)
+                for data__runtimelibrarydirs_x, data__runtimelibrarydirs_item in enumerate(data__runtimelibrarydirs):
+                    if not isinstance(data__runtimelibrarydirs_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime-library-dirs[{data__runtimelibrarydirs_x}]".format(**locals()) + " must be string", value=data__runtimelibrarydirs_item, name="" + (name_prefix or "data") + ".runtime-library-dirs[{data__runtimelibrarydirs_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "extra-objects" in data_keys:
+            data_keys.remove("extra-objects")
+            data__extraobjects = data["extra-objects"]
+            if not isinstance(data__extraobjects, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".extra-objects must be array", value=data__extraobjects, name="" + (name_prefix or "data") + ".extra-objects", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__extraobjects_is_list = isinstance(data__extraobjects, (list, tuple))
+            if data__extraobjects_is_list:
+                data__extraobjects_len = len(data__extraobjects)
+                for data__extraobjects_x, data__extraobjects_item in enumerate(data__extraobjects):
+                    if not isinstance(data__extraobjects_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".extra-objects[{data__extraobjects_x}]".format(**locals()) + " must be string", value=data__extraobjects_item, name="" + (name_prefix or "data") + ".extra-objects[{data__extraobjects_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "extra-compile-args" in data_keys:
+            data_keys.remove("extra-compile-args")
+            data__extracompileargs = data["extra-compile-args"]
+            if not isinstance(data__extracompileargs, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".extra-compile-args must be array", value=data__extracompileargs, name="" + (name_prefix or "data") + ".extra-compile-args", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__extracompileargs_is_list = isinstance(data__extracompileargs, (list, tuple))
+            if data__extracompileargs_is_list:
+                data__extracompileargs_len = len(data__extracompileargs)
+                for data__extracompileargs_x, data__extracompileargs_item in enumerate(data__extracompileargs):
+                    if not isinstance(data__extracompileargs_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".extra-compile-args[{data__extracompileargs_x}]".format(**locals()) + " must be string", value=data__extracompileargs_item, name="" + (name_prefix or "data") + ".extra-compile-args[{data__extracompileargs_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "extra-link-args" in data_keys:
+            data_keys.remove("extra-link-args")
+            data__extralinkargs = data["extra-link-args"]
+            if not isinstance(data__extralinkargs, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".extra-link-args must be array", value=data__extralinkargs, name="" + (name_prefix or "data") + ".extra-link-args", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__extralinkargs_is_list = isinstance(data__extralinkargs, (list, tuple))
+            if data__extralinkargs_is_list:
+                data__extralinkargs_len = len(data__extralinkargs)
+                for data__extralinkargs_x, data__extralinkargs_item in enumerate(data__extralinkargs):
+                    if not isinstance(data__extralinkargs_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".extra-link-args[{data__extralinkargs_x}]".format(**locals()) + " must be string", value=data__extralinkargs_item, name="" + (name_prefix or "data") + ".extra-link-args[{data__extralinkargs_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "export-symbols" in data_keys:
+            data_keys.remove("export-symbols")
+            data__exportsymbols = data["export-symbols"]
+            if not isinstance(data__exportsymbols, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".export-symbols must be array", value=data__exportsymbols, name="" + (name_prefix or "data") + ".export-symbols", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__exportsymbols_is_list = isinstance(data__exportsymbols, (list, tuple))
+            if data__exportsymbols_is_list:
+                data__exportsymbols_len = len(data__exportsymbols)
+                for data__exportsymbols_x, data__exportsymbols_item in enumerate(data__exportsymbols):
+                    if not isinstance(data__exportsymbols_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".export-symbols[{data__exportsymbols_x}]".format(**locals()) + " must be string", value=data__exportsymbols_item, name="" + (name_prefix or "data") + ".export-symbols[{data__exportsymbols_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "swig-opts" in data_keys:
+            data_keys.remove("swig-opts")
+            data__swigopts = data["swig-opts"]
+            if not isinstance(data__swigopts, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".swig-opts must be array", value=data__swigopts, name="" + (name_prefix or "data") + ".swig-opts", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__swigopts_is_list = isinstance(data__swigopts, (list, tuple))
+            if data__swigopts_is_list:
+                data__swigopts_len = len(data__swigopts)
+                for data__swigopts_x, data__swigopts_item in enumerate(data__swigopts):
+                    if not isinstance(data__swigopts_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".swig-opts[{data__swigopts_x}]".format(**locals()) + " must be string", value=data__swigopts_item, name="" + (name_prefix or "data") + ".swig-opts[{data__swigopts_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "depends" in data_keys:
+            data_keys.remove("depends")
+            data__depends = data["depends"]
+            if not isinstance(data__depends, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".depends must be array", value=data__depends, name="" + (name_prefix or "data") + ".depends", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+            data__depends_is_list = isinstance(data__depends, (list, tuple))
+            if data__depends_is_list:
+                data__depends_len = len(data__depends)
+                for data__depends_x, data__depends_item in enumerate(data__depends):
+                    if not isinstance(data__depends_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".depends[{data__depends_x}]".format(**locals()) + " must be string", value=data__depends_item, name="" + (name_prefix or "data") + ".depends[{data__depends_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "language" in data_keys:
+            data_keys.remove("language")
+            data__language = data["language"]
+            if not isinstance(data__language, (str)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".language must be string", value=data__language, name="" + (name_prefix or "data") + ".language", definition={'type': 'string'}, rule='type')
+        if "optional" in data_keys:
+            data_keys.remove("optional")
+            data__optional = data["optional"]
+            if not isinstance(data__optional, (bool)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional must be boolean", value=data__optional, name="" + (name_prefix or "data") + ".optional", definition={'type': 'boolean'}, rule='type')
+        if "py-limited-api" in data_keys:
+            data_keys.remove("py-limited-api")
+            data__pylimitedapi = data["py-limited-api"]
+            if not isinstance(data__pylimitedapi, (bool)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-limited-api must be boolean", value=data__pylimitedapi, name="" + (name_prefix or "data") + ".py-limited-api", definition={'type': 'boolean'}, rule='type')
+        if data_keys:
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/ext-module', 'title': 'Extension module', 'description': 'Parameters to construct a :class:`setuptools.Extension` object', 'type': 'object', 'required': ['name', 'sources'], 'additionalProperties': False, 'properties': {'name': {'type': 'string', 'format': 'python-module-name-relaxed'}, 'sources': {'type': 'array', 'items': {'type': 'string'}}, 'include-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'define-macros': {'type': 'array', 'items': {'type': 'array', 'items': [{'description': 'macro name', 'type': 'string'}, {'description': 'macro value', 'oneOf': [{'type': 'string'}, {'type': 'null'}]}], 'additionalItems': False}}, 'undef-macros': {'type': 'array', 'items': {'type': 'string'}}, 'library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'libraries': {'type': 'array', 'items': {'type': 'string'}}, 'runtime-library-dirs': {'type': 'array', 'items': {'type': 'string'}}, 'extra-objects': {'type': 'array', 'items': {'type': 'string'}}, 'extra-compile-args': {'type': 'array', 'items': {'type': 'string'}}, 'extra-link-args': {'type': 'array', 'items': {'type': 'string'}}, 'export-symbols': {'type': 'array', 'items': {'type': 'string'}}, 'swig-opts': {'type': 'array', 'items': {'type': 'string'}}, 'depends': {'type': 'array', 'items': {'type': 'string'}}, 'language': {'type': 'string'}, 'optional': {'type': 'boolean'}, 'py-limited-api': {'type': 'boolean'}}}, rule='additionalProperties')
+    return data
+
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_find_directive(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (dict)):
         raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='type')
@@ -674,26 +889,26 @@ def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_htm
 def validate_https___setuptools_pypa_io_en_latest_userguide_pyproject_config_html__definitions_package_name(data, custom_formats={}, name_prefix=None):
     if not isinstance(data, (str)):
         raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, rule='type')
-    data_any_of_count9 = 0
-    if not data_any_of_count9:
+    data_any_of_count10 = 0
+    if not data_any_of_count10:
         try:
             if not isinstance(data, (str)):
                 raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='type')
             if isinstance(data, str):
                 if not custom_formats["python-module-name-relaxed"](data):
                     raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-module-name-relaxed", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'python-module-name-relaxed'}, rule='format')
-            data_any_of_count9 += 1
+            data_any_of_count10 += 1
         except JsonSchemaValueException: pass
-    if not data_any_of_count9:
+    if not data_any_of_count10:
         try:
             if not isinstance(data, (str)):
                 raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'pep561-stub-name'}, rule='type')
             if isinstance(data, str):
                 if not custom_formats["pep561-stub-name"](data):
                     raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep561-stub-name", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'format': 'pep561-stub-name'}, rule='format')
-            data_any_of_count9 += 1
+            data_any_of_count10 += 1
         except JsonSchemaValueException: pass
-    if not data_any_of_count9:
+    if not data_any_of_count10:
         raise JsonSchemaValueException("" + (name_prefix or "data") + " cannot be validated by any definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/package-name', 'title': 'Valid package name', 'description': 'Valid package name (importable or :pep:`561`).', 'type': 'string', 'anyOf': [{'type': 'string', 'format': 'python-module-name-relaxed'}, {'type': 'string', 'format': 'pep561-stub-name'}]}, rule='anyOf')
     return data
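
For orientation, the ``anyOf`` above accepts a value when either format check passes. A minimal sketch with deliberately simplified format callables (these regexes are assumptions; the real checks in ``_validate_pyproject`` are stricter):

    import re
    formats = {
        "python-module-name-relaxed": lambda s: bool(re.fullmatch(r"[\w-]+(\.[\w-]+)*", s)),
        "pep561-stub-name": lambda s: bool(re.fullmatch(r"[\w.]+-stubs", s)),
    }
    # "mypkg.sub" and "mypkg-stubs" both validate (importable name / PEP 561 stub name);
    # a value failing both branches raises:
    #   JsonSchemaValueException("data cannot be validated by any definition")
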
 
@@ -749,19 +964,19 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
         if "readme" in data_keys:
             data_keys.remove("readme")
             data__readme = data["readme"]
-            data__readme_one_of_count10 = 0
-            if data__readme_one_of_count10 < 2:
+            data__readme_one_of_count11 = 0
+            if data__readme_one_of_count11 < 2:
                 try:
                     if not isinstance(data__readme, (str)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be string", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, rule='type')
-                    data__readme_one_of_count10 += 1
+                    data__readme_one_of_count11 += 1
                 except JsonSchemaValueException: pass
-            if data__readme_one_of_count10 < 2:
+            if data__readme_one_of_count11 < 2:
                 try:
                     if not isinstance(data__readme, (dict)):
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be object", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}, rule='type')
-                    data__readme_any_of_count11 = 0
-                    if not data__readme_any_of_count11:
+                    data__readme_any_of_count12 = 0
+                    if not data__readme_any_of_count12:
                         try:
                             data__readme_is_dict = isinstance(data__readme, dict)
                             if data__readme_is_dict:
@@ -774,9 +989,9 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                                     data__readme__file = data__readme["file"]
                                     if not isinstance(data__readme__file, (str)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.file must be string", value=data__readme__file, name="" + (name_prefix or "data") + ".readme.file", definition={'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}, rule='type')
-                            data__readme_any_of_count11 += 1
+                            data__readme_any_of_count12 += 1
                         except JsonSchemaValueException: pass
-                    if not data__readme_any_of_count11:
+                    if not data__readme_any_of_count12:
                         try:
                             data__readme_is_dict = isinstance(data__readme, dict)
                             if data__readme_is_dict:
@@ -789,9 +1004,9 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                                     data__readme__text = data__readme["text"]
                                     if not isinstance(data__readme__text, (str)):
                                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.text must be string", value=data__readme__text, name="" + (name_prefix or "data") + ".readme.text", definition={'type': 'string', 'description': 'Full text describing the project.'}, rule='type')
-                            data__readme_any_of_count11 += 1
+                            data__readme_any_of_count12 += 1
                         except JsonSchemaValueException: pass
-                    if not data__readme_any_of_count11:
+                    if not data__readme_any_of_count12:
                         raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme cannot be validated by any definition", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, rule='anyOf')
                     data__readme_is_dict = isinstance(data__readme, dict)
                     if data__readme_is_dict:
@@ -804,10 +1019,10 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                             data__readme__contenttype = data__readme["content-type"]
                             if not isinstance(data__readme__contenttype, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.content-type must be string", value=data__readme__contenttype, name="" + (name_prefix or "data") + ".readme.content-type", definition={'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}, rule='type')
-                    data__readme_one_of_count10 += 1
+                    data__readme_one_of_count11 += 1
                 except JsonSchemaValueException: pass
-            if data__readme_one_of_count10 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count10) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf')
+            if data__readme_one_of_count11 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count11) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '`_', "with meaning similar to the one defined in `core metadata's Description", '`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf')
         if "requires-python" in data_keys:
             data_keys.remove("requires-python")
             data__requirespython = data["requires-python"]
@@ -819,8 +1034,8 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
         if "license" in data_keys:
             data_keys.remove("license")
             data__license = data["license"]
-            data__license_one_of_count12 = 0
-            if data__license_one_of_count12 < 2:
+            data__license_one_of_count13 = 0
+            if data__license_one_of_count13 < 2:
                 try:
                     data__license_is_dict = isinstance(data__license, dict)
                     if data__license_is_dict:
@@ -833,9 +1048,9 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                             data__license__file = data__license["file"]
                             if not isinstance(data__license__file, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.file must be string", value=data__license__file, name="" + (name_prefix or "data") + ".license.file", definition={'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}, rule='type')
-                    data__license_one_of_count12 += 1
+                    data__license_one_of_count13 += 1
                 except JsonSchemaValueException: pass
-            if data__license_one_of_count12 < 2:
+            if data__license_one_of_count13 < 2:
                 try:
                     data__license_is_dict = isinstance(data__license, dict)
                     if data__license_is_dict:
@@ -848,10 +1063,10 @@ def validate_https___packaging_python_org_en_latest_specifications_pyproject_tom
                             data__license__text = data__license["text"]
                             if not isinstance(data__license__text, (str)):
                                 raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.text must be string", value=data__license__text, name="" + (name_prefix or "data") + ".license.text", definition={'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}, rule='type')
-                    data__license_one_of_count12 += 1
+                    data__license_one_of_count13 += 1
                 except JsonSchemaValueException: pass
-            if data__license_one_of_count12 != 1:
-                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count12) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, rule='oneOf')
+            if data__license_one_of_count13 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count13) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license `_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '`_.']}}, 'required': ['text']}]}, rule='oneOf')
         if "authors" in data_keys:
             data_keys.remove("authors")
             data__authors = data["authors"]
diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py
index 3449a4bfb7..5d95e18b83 100644
--- a/setuptools/config/pyprojecttoml.py
+++ b/setuptools/config/pyprojecttoml.py
@@ -130,6 +130,9 @@ def read_configuration(
     asdict["tool"] = tool_table
     tool_table["setuptools"] = setuptools_table
 
+    if "ext-modules" in setuptools_table:
+        _ExperimentalConfiguration.emit(subject="[tool.setuptools.ext-modules]")
+
     with _ignore_errors(ignore_option_errors):
         # Don't complain about unrelated errors (e.g. tools not using the "tool" table)
         subset = {"project": project_table, "tool": {"setuptools": setuptools_table}}
diff --git a/setuptools/config/setuptools.schema.json b/setuptools/config/setuptools.schema.json
index 50ee6217ee..ec887b3573 100644
--- a/setuptools/config/setuptools.schema.json
+++ b/setuptools/config/setuptools.schema.json
@@ -158,6 +158,11 @@
       "items": {"type": "string", "format": "python-module-name-relaxed"},
       "$comment": "TODO: clarify the relationship with ``packages``"
     },
+    "ext-modules": {
+      "description": "Extension modules to be compiled by setuptools",
+      "type": "array",
+      "items": {"$ref": "#/definitions/ext-module"}
+    },
     "data-files": {
       "$$description": [
         "``dict``-like structure where each key represents a directory and",
@@ -254,6 +259,82 @@
         {"type": "string", "format": "pep561-stub-name"}
       ]
     },
+    "ext-module": {
+      "$id": "#/definitions/ext-module",
+      "title": "Extension module",
+      "description": "Parameters to construct a :class:`setuptools.Extension` object",
+      "type": "object",
+      "required": ["name", "sources"],
+      "additionalProperties": false,
+      "properties": {
+        "name": {
+          "type": "string",
+          "format": "python-module-name-relaxed"
+        },
+        "sources": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "include-dirs":{
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "define-macros": {
+          "type": "array",
+          "items": {
+            "type": "array",
+            "items": [
+              {"description": "macro name", "type": "string"},
+              {"description": "macro value", "oneOf": [{"type": "string"}, {"type": "null"}]}
+            ],
+            "additionalItems": false
+          }
+        },
+        "undef-macros": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "library-dirs": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "libraries": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "runtime-library-dirs": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "extra-objects": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "extra-compile-args": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "extra-link-args": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "export-symbols": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "swig-opts": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "depends": {
+          "type": "array",
+          "items": {"type": "string"}
+        },
+        "language": {"type": "string"},
+        "optional": {"type": "boolean"},
+        "py-limited-api": {"type": "boolean"}
+      }
+    },
     "file-directive": {
       "$id": "#/definitions/file-directive",
       "title": "'file:' directive",
diff --git a/setuptools/extension.py b/setuptools/extension.py
index dcc7709982..cbefc72508 100644
--- a/setuptools/extension.py
+++ b/setuptools/extension.py
@@ -127,7 +127,7 @@ class Extension(_Extension):
     :keyword bool py_limited_api:
       opt-in flag for the usage of :doc:`Python's limited API `.
 
-    :raises setuptools.errors.PlatformError: if 'runtime_library_dirs' is
+    :raises setuptools.errors.PlatformError: if ``runtime_library_dirs`` is
       specified on Windows. (since v63)
     """
 
diff --git a/setuptools/monkey.py b/setuptools/monkey.py
index a69ccd3312..3b6eefb4dd 100644
--- a/setuptools/monkey.py
+++ b/setuptools/monkey.py
@@ -4,12 +4,10 @@
 
 from __future__ import annotations
 
-import functools
 import inspect
 import platform
 import sys
 import types
-from importlib import import_module
 from typing import Type, TypeVar, cast, overload
 
 import distutils.filelist
@@ -91,8 +89,6 @@ def patch_all():
             'distutils.command.build_ext'
         ].Extension = setuptools.extension.Extension
 
-    patch_for_msvc_specialized_compiler()
-
 
 def _patch_distribution_metadata():
     from . import _core_metadata
@@ -128,36 +124,3 @@ def patch_func(replacement, target_mod, func_name):
 
 def get_unpatched_function(candidate):
     return candidate.unpatched
-
-
-def patch_for_msvc_specialized_compiler():
-    """
-    Patch functions in distutils to use standalone Microsoft Visual C++
-    compilers.
-    """
-    from . import msvc
-
-    if platform.system() != 'Windows':
-        # Compilers only available on Microsoft Windows
-        return
-
-    def patch_params(mod_name, func_name):
-        """
-        Prepare the parameters for patch_func to patch indicated function.
-        """
-        repl_prefix = 'msvc14_'
-        repl_name = repl_prefix + func_name.lstrip('_')
-        repl = getattr(msvc, repl_name)
-        mod = import_module(mod_name)
-        if not hasattr(mod, func_name):
-            raise ImportError(func_name)
-        return repl, mod, func_name
-
-    # Python 3.5+
-    msvc14 = functools.partial(patch_params, 'distutils._msvccompiler')
-
-    try:
-        # Patch distutils._msvccompiler._get_vc_env
-        patch_func(*msvc14('_get_vc_env'))
-    except ImportError:
-        pass
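
The removed MSVC-specific helper was the last caller of this pattern, but the generic ``patch_func``/``get_unpatched_function`` pair stays in ``monkey.py``. A minimal sketch of that idiom, assuming the retained implementation stores the original function on its replacement:

    # Sketch of the monkeypatching idiom kept in setuptools/monkey.py:
    # the replacement remembers the original so callers can recover it.
    def patch_func(replacement, target_mod, func_name):
        original = getattr(target_mod, func_name)
        vars(replacement).setdefault('unpatched', original)
        setattr(target_mod, func_name, replacement)

    def get_unpatched_function(candidate):
        return candidate.unpatched
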
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 57f09417ca..de4b05f928 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -1,14 +1,8 @@
 """
-Improved support for Microsoft Visual C++ compilers.
+Environment info about Microsoft compilers.
 
-Known supported compilers:
---------------------------
-Microsoft Visual C++ 14.X:
-    Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
-    Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
-    Microsoft Visual Studio Build Tools 2019 (x86, x64, arm, arm64)
-
-This may also support compilers shipped with compatible Visual Studio versions.
+>>> getfixture('windows_only')
+>>> ei = EnvironmentInfo('amd64')
 """
 
 from __future__ import annotations
@@ -16,17 +10,14 @@
 import contextlib
 import itertools
 import json
+import os
+import os.path
 import platform
-import subprocess
-from os import listdir, pathsep
-from os.path import dirname, isdir, isfile, join
-from subprocess import CalledProcessError
 from typing import TYPE_CHECKING
 
 from more_itertools import unique_everseen
 
 import distutils.errors
-from distutils.util import get_platform
 
 # https://github.com/python/mypy/issues/8166
 if not TYPE_CHECKING and platform.system() == 'Windows':
@@ -44,240 +35,6 @@ class winreg:
     environ: dict[str, str] = dict()
 
 
-def _msvc14_find_vc2015():
-    """Python 3.8 "distutils/_msvccompiler.py" backport"""
-    try:
-        key = winreg.OpenKey(
-            winreg.HKEY_LOCAL_MACHINE,
-            r"Software\Microsoft\VisualStudio\SxS\VC7",
-            0,
-            winreg.KEY_READ | winreg.KEY_WOW64_32KEY,
-        )
-    except OSError:
-        return None, None
-
-    best_version = 0
-    best_dir = None
-    with key:
-        for i in itertools.count():
-            try:
-                v, vc_dir, vt = winreg.EnumValue(key, i)
-            except OSError:
-                break
-            if v and vt == winreg.REG_SZ and isdir(vc_dir):
-                try:
-                    version = int(float(v))
-                except (ValueError, TypeError):
-                    continue
-                if version >= 14 and version > best_version:
-                    best_version, best_dir = version, vc_dir
-    return best_version, best_dir
-
-
-def _msvc14_find_vc2017():
-    """Python 3.8 "distutils/_msvccompiler.py" backport
-
-    Returns "15, path" based on the result of invoking vswhere.exe
-    If no install is found, returns "None, None"
-
-    The version is returned to avoid unnecessarily changing the function
-    result. It may be ignored when the path is not None.
-
-    If vswhere.exe is not available, by definition, VS 2017 is not
-    installed.
-    """
-    root = environ.get("ProgramFiles(x86)") or environ.get("ProgramFiles")
-    if not root:
-        return None, None
-
-    variant = 'arm64' if get_platform() == 'win-arm64' else 'x86.x64'
-    suitable_components = (
-        f"Microsoft.VisualStudio.Component.VC.Tools.{variant}",
-        "Microsoft.VisualStudio.Workload.WDExpress",
-    )
-
-    for component in suitable_components:
-        # Workaround for `-requiresAny` (only available on VS 2017 > 15.6)
-        with contextlib.suppress(CalledProcessError, OSError, UnicodeDecodeError):
-            path = (
-                subprocess.check_output([
-                    join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
-                    "-latest",
-                    "-prerelease",
-                    "-requires",
-                    component,
-                    "-property",
-                    "installationPath",
-                    "-products",
-                    "*",
-                ])
-                .decode(encoding="mbcs", errors="strict")
-                .strip()
-            )
-
-            path = join(path, "VC", "Auxiliary", "Build")
-            if isdir(path):
-                return 15, path
-
-    return None, None  # no suitable component found
-
-
-PLAT_SPEC_TO_RUNTIME = {
-    'x86': 'x86',
-    'x86_amd64': 'x64',
-    'x86_arm': 'arm',
-    'x86_arm64': 'arm64',
-}
-
-
-def _msvc14_find_vcvarsall(plat_spec):
-    """Python 3.8 "distutils/_msvccompiler.py" backport"""
-    _, best_dir = _msvc14_find_vc2017()
-    vcruntime = None
-
-    if plat_spec in PLAT_SPEC_TO_RUNTIME:
-        vcruntime_plat = PLAT_SPEC_TO_RUNTIME[plat_spec]
-    else:
-        vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86'
-
-    if best_dir:
-        vcredist = join(
-            best_dir,
-            "..",
-            "..",
-            "redist",
-            "MSVC",
-            "**",
-            vcruntime_plat,
-            "Microsoft.VC14*.CRT",
-            "vcruntime140.dll",
-        )
-        try:
-            import glob
-
-            vcruntime = glob.glob(vcredist, recursive=True)[-1]
-        except (ImportError, OSError, LookupError):
-            vcruntime = None
-
-    if not best_dir:
-        best_version, best_dir = _msvc14_find_vc2015()
-        if best_version:
-            vcruntime = join(
-                best_dir,
-                'redist',
-                vcruntime_plat,
-                "Microsoft.VC140.CRT",
-                "vcruntime140.dll",
-            )
-
-    if not best_dir:
-        return None, None
-
-    vcvarsall = join(best_dir, "vcvarsall.bat")
-    if not isfile(vcvarsall):
-        return None, None
-
-    if not vcruntime or not isfile(vcruntime):
-        vcruntime = None
-
-    return vcvarsall, vcruntime
-
-
-def _msvc14_get_vc_env(plat_spec):
-    """Python 3.8 "distutils/_msvccompiler.py" backport"""
-    if "DISTUTILS_USE_SDK" in environ:
-        return {key.lower(): value for key, value in environ.items()}
-
-    vcvarsall, vcruntime = _msvc14_find_vcvarsall(plat_spec)
-    if not vcvarsall:
-        raise distutils.errors.DistutilsPlatformError("Unable to find vcvarsall.bat")
-
-    try:
-        out = subprocess.check_output(
-            'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
-            stderr=subprocess.STDOUT,
-        ).decode('utf-16le', errors='replace')
-    except subprocess.CalledProcessError as exc:
-        raise distutils.errors.DistutilsPlatformError(
-            "Error executing {}".format(exc.cmd)
-        ) from exc
-
-    env = {
-        key.lower(): value
-        for key, _, value in (line.partition('=') for line in out.splitlines())
-        if key and value
-    }
-
-    if vcruntime:
-        env['py_vcruntime_redist'] = vcruntime
-    return env
-
-
-def msvc14_get_vc_env(plat_spec):
-    """
-    Patched "distutils._msvccompiler._get_vc_env" for support extra
-    Microsoft Visual C++ 14.X compilers.
-
-    Set environment without use of "vcvarsall.bat".
-
-    Parameters
-    ----------
-    plat_spec: str
-        Target architecture.
-
-    Return
-    ------
-    dict
-        environment
-    """
-
-    # Always use backport from CPython 3.8
-    try:
-        return _msvc14_get_vc_env(plat_spec)
-    except distutils.errors.DistutilsPlatformError as exc:
-        _augment_exception(exc, 14.0)
-        raise
-
-
-def _augment_exception(exc, version, arch=''):
-    """
-    Add details to the exception message to help guide the user
-    as to what action will resolve it.
-    """
-    # Error if MSVC++ directory not found or environment not set
-    message = exc.args[0]
-
-    if "vcvarsall" in message.lower() or "visual c" in message.lower():
-        # Special error message if MSVC++ not installed
-        tmpl = 'Microsoft Visual C++ {version:0.1f} or greater is required.'
-        message = tmpl.format(**locals())
-        msdownload = 'www.microsoft.com/download/details.aspx?id=%d'
-        if version == 9.0:
-            if arch.lower().find('ia64') > -1:
-                # For VC++ 9.0, if IA64 support is needed, redirect user
-                # to Windows SDK 7.0.
-                # Note: No download link available from Microsoft.
-                message += ' Get it with "Microsoft Windows SDK 7.0"'
-            else:
-                # For VC++ 9.0 redirect user to Vc++ for Python 2.7 :
-                # This redirection link is maintained by Microsoft.
-                # Contact vspython@microsoft.com if it needs updating.
-                message += ' Get it from http://aka.ms/vcpython27'
-        elif version == 10.0:
-            # For VC++ 10.0 Redirect user to Windows SDK 7.1
-            message += ' Get it with "Microsoft Windows SDK 7.1": '
-            message += msdownload % 8279
-        elif version >= 14.0:
-            # For VC++ 14.X Redirect user to latest Visual C++ Build Tools
-            message += (
-                ' Get it with "Microsoft C++ Build Tools": '
-                r'https://visualstudio.microsoft.com'
-                r'/visual-cpp-build-tools/'
-            )
-
-    exc.args = (message,)
-
-
 class PlatformInfo:
     """
     Current and Target Architectures information.
@@ -441,7 +198,7 @@ def sxs(self):
         str
             Registry key
         """
-        return join(self.visualstudio, 'SxS')
+        return os.path.join(self.visualstudio, 'SxS')
 
     @property
     def vc(self):
@@ -453,7 +210,7 @@ def vc(self):
         str
             Registry key
         """
-        return join(self.sxs, 'VC7')
+        return os.path.join(self.sxs, 'VC7')
 
     @property
     def vs(self):
@@ -465,7 +222,7 @@ def vs(self):
         str
             Registry key
         """
-        return join(self.sxs, 'VS7')
+        return os.path.join(self.sxs, 'VS7')
 
     @property
     def vc_for_python(self):
@@ -501,7 +258,7 @@ def windows_sdk(self):
         str
             Registry key
         """
-        return join(self.microsoft_sdk, 'Windows')
+        return os.path.join(self.microsoft_sdk, 'Windows')
 
     @property
     def netfx_sdk(self):
@@ -513,7 +270,7 @@ def netfx_sdk(self):
         str
             Registry key
         """
-        return join(self.microsoft_sdk, 'NETFXSDK')
+        return os.path.join(self.microsoft_sdk, 'NETFXSDK')
 
     @property
     def windows_kits_roots(self):
@@ -544,7 +301,7 @@ def microsoft(self, key, x86=False):
             Registry key
         """
         node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node'
-        return join('Software', node64, 'Microsoft', key)
+        return os.path.join('Software', node64, 'Microsoft', key)
 
     def lookup(self, key, name):
         """
@@ -680,7 +437,7 @@ def find_programdata_vs_vers(self):
         instances_dir = r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances'
 
         try:
-            hashed_names = listdir(instances_dir)
+            hashed_names = os.listdir(instances_dir)
 
         except OSError:
            # Directory does not exist for all Visual Studio versions
@@ -689,13 +446,13 @@ def find_programdata_vs_vers(self):
         for name in hashed_names:
             try:
                 # Get VS installation path from "state.json" file
-                state_path = join(instances_dir, name, 'state.json')
+                state_path = os.path.join(instances_dir, name, 'state.json')
                 with open(state_path, 'rt', encoding='utf-8') as state_file:
                     state = json.load(state_file)
                 vs_path = state['installationPath']
 
                 # Raises OSError if this VS installation does not contain VC
-                listdir(join(vs_path, r'VC\Tools\MSVC'))
+                os.listdir(os.path.join(vs_path, r'VC\Tools\MSVC'))
 
                 # Store version and path
                 vs_versions[self._as_float_version(state['installationVersion'])] = (
@@ -736,7 +493,7 @@ def VSInstallDir(self):
             path
         """
         # Default path
-        default = join(
+        default = os.path.join(
             self.ProgramFilesx86, 'Microsoft Visual Studio %0.1f' % self.vs_ver
         )
 
@@ -755,7 +512,7 @@ def VCInstallDir(self):
         """
         path = self._guess_vc() or self._guess_vc_legacy()
 
-        if not isdir(path):
+        if not os.path.isdir(path):
             msg = 'Microsoft Visual C++ directory not found'
             raise distutils.errors.DistutilsPlatformError(msg)
 
@@ -780,14 +537,14 @@ def _guess_vc(self):
             # Else, search with path from registry
             vs_dir = self.VSInstallDir
 
-        guess_vc = join(vs_dir, r'VC\Tools\MSVC')
+        guess_vc = os.path.join(vs_dir, r'VC\Tools\MSVC')
 
         # Subdir with VC exact version as name
         try:
             # Update the VC version with real one instead of VS version
-            vc_ver = listdir(guess_vc)[-1]
+            vc_ver = os.listdir(guess_vc)[-1]
             self.vc_ver = self._as_float_version(vc_ver)
-            return join(guess_vc, vc_ver)
+            return os.path.join(guess_vc, vc_ver)
         except (OSError, IndexError):
             return ''
 
@@ -800,14 +557,14 @@ def _guess_vc_legacy(self):
         str
             path
         """
-        default = join(
+        default = os.path.join(
             self.ProgramFilesx86, r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver
         )
 
         # Try to get "VC++ for Python" path from registry as default path
-        reg_path = join(self.ri.vc_for_python, '%0.1f' % self.vs_ver)
+        reg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vs_ver)
         python_vc = self.ri.lookup(reg_path, 'installdir')
-        default_vc = join(python_vc, 'VC') if python_vc else default
+        default_vc = os.path.join(python_vc, 'VC') if python_vc else default
 
         # Try to get path from registry, if fail use default path
         return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc
@@ -844,7 +601,7 @@ def WindowsSdkLastVersion(self):
         str
             version
         """
-        return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib'))
+        return self._use_last_dir_name(os.path.join(self.WindowsSdkDir, 'lib'))
 
     @property
     def WindowsSdkDir(self):  # noqa: C901  # is too complex (12)  # FIXME
@@ -859,34 +616,34 @@ def WindowsSdkDir(self):  # noqa: C901  # is too complex (12)  # FIXME
         sdkdir = ''
         for ver in self.WindowsSdkVersion:
             # Try to get it from registry
-            loc = join(self.ri.windows_sdk, 'v%s' % ver)
+            loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver)
             sdkdir = self.ri.lookup(loc, 'installationfolder')
             if sdkdir:
                 break
-        if not sdkdir or not isdir(sdkdir):
+        if not sdkdir or not os.path.isdir(sdkdir):
             # Try to get "VC++ for Python" version from registry
-            path = join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
+            path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
             install_base = self.ri.lookup(path, 'installdir')
             if install_base:
-                sdkdir = join(install_base, 'WinSDK')
-        if not sdkdir or not isdir(sdkdir):
+                sdkdir = os.path.join(install_base, 'WinSDK')
+        if not sdkdir or not os.path.isdir(sdkdir):
             # If fail, use default new path
             for ver in self.WindowsSdkVersion:
                 intver = ver[: ver.rfind('.')]
                 path = r'Microsoft SDKs\Windows Kits\%s' % intver
-                d = join(self.ProgramFiles, path)
-                if isdir(d):
+                d = os.path.join(self.ProgramFiles, path)
+                if os.path.isdir(d):
                     sdkdir = d
-        if not sdkdir or not isdir(sdkdir):
+        if not sdkdir or not os.path.isdir(sdkdir):
             # If fail, use default old path
             for ver in self.WindowsSdkVersion:
                 path = r'Microsoft SDKs\Windows\v%s' % ver
-                d = join(self.ProgramFiles, path)
-                if isdir(d):
+                d = os.path.join(self.ProgramFiles, path)
+                if os.path.isdir(d):
                     sdkdir = d
         if not sdkdir:
             # If fail, use Platform SDK
-            sdkdir = join(self.VCInstallDir, 'PlatformSDK')
+            sdkdir = os.path.join(self.VCInstallDir, 'PlatformSDK')
         return sdkdir
 
     @property
@@ -913,10 +670,10 @@ def WindowsSDKExecutablePath(self):
         regpaths = []
         if self.vs_ver >= 14.0:
             for ver in self.NetFxSdkVersion:
-                regpaths += [join(self.ri.netfx_sdk, ver, fx)]
+                regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)]
 
         for ver in self.WindowsSdkVersion:
-            regpaths += [join(self.ri.windows_sdk, 'v%sA' % ver, fx)]
+            regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)]
 
         # Return installation folder from the more recent path
         for path in regpaths:
@@ -936,7 +693,7 @@ def FSharpInstallDir(self):
         str
             path
         """
-        path = join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver)
+        path = os.path.join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver)
         return self.ri.lookup(path, 'productdir') or ''
 
     @property
@@ -970,7 +727,7 @@ def UniversalCRTSdkLastVersion(self):
         str
             version
         """
-        return self._use_last_dir_name(join(self.UniversalCRTSdkDir, 'lib'))
+        return self._use_last_dir_name(os.path.join(self.UniversalCRTSdkDir, 'lib'))
 
     @property
     def NetFxSdkVersion(self):
@@ -1001,7 +758,7 @@ def NetFxSdkDir(self):
         """
         sdkdir = ''
         for ver in self.NetFxSdkVersion:
-            loc = join(self.ri.netfx_sdk, ver)
+            loc = os.path.join(self.ri.netfx_sdk, ver)
             sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder')
             if sdkdir:
                 break
@@ -1018,7 +775,7 @@ def FrameworkDir32(self):
             path
         """
         # Default path
-        guess_fw = join(self.WinDir, r'Microsoft.NET\Framework')
+        guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework')
 
         # Try to get path from registry, if fail use default path
         return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw
@@ -1034,7 +791,7 @@ def FrameworkDir64(self):
             path
         """
         # Default path
-        guess_fw = join(self.WinDir, r'Microsoft.NET\Framework64')
+        guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework64')
 
         # Try to get path from registry, if fail use default path
         return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw
@@ -1112,8 +869,9 @@ def _use_last_dir_name(path, prefix=''):
         """
         matching_dirs = (
             dir_name
-            for dir_name in reversed(listdir(path))
-            if isdir(join(path, dir_name)) and dir_name.startswith(prefix)
+            for dir_name in reversed(os.listdir(path))
+            if os.path.isdir(os.path.join(path, dir_name))
+            and dir_name.startswith(prefix)
         )
         return next(matching_dirs, None) or ''
 
@@ -1193,7 +951,7 @@ def VSTools(self):
             paths += [r'Team Tools\Performance Tools']
             paths += [r'Team Tools\Performance Tools%s' % arch_subdir]
 
-        return [join(self.si.VSInstallDir, path) for path in paths]
+        return [os.path.join(self.si.VSInstallDir, path) for path in paths]
 
     @property
     def VCIncludes(self):
@@ -1206,8 +964,8 @@ def VCIncludes(self):
             paths
         """
         return [
-            join(self.si.VCInstallDir, 'Include'),
-            join(self.si.VCInstallDir, r'ATLMFC\Include'),
+            os.path.join(self.si.VCInstallDir, 'Include'),
+            os.path.join(self.si.VCInstallDir, r'ATLMFC\Include'),
         ]
 
     @property
@@ -1229,7 +987,7 @@ def VCLibraries(self):
         if self.vs_ver >= 14.0:
             paths += [r'Lib\store%s' % arch_subdir]
 
-        return [join(self.si.VCInstallDir, path) for path in paths]
+        return [os.path.join(self.si.VCInstallDir, path) for path in paths]
 
     @property
     def VCStoreRefs(self):
@@ -1243,7 +1001,7 @@ def VCStoreRefs(self):
         """
         if self.vs_ver < 14.0:
             return []
-        return [join(self.si.VCInstallDir, r'Lib\store\references')]
+        return [os.path.join(self.si.VCInstallDir, r'Lib\store\references')]
 
     @property
     def VCTools(self):
@@ -1256,30 +1014,34 @@ def VCTools(self):
             paths
         """
         si = self.si
-        tools = [join(si.VCInstallDir, 'VCPackages')]
+        tools = [os.path.join(si.VCInstallDir, 'VCPackages')]
 
         forcex86 = True if self.vs_ver <= 10.0 else False
         arch_subdir = self.pi.cross_dir(forcex86)
         if arch_subdir:
-            tools += [join(si.VCInstallDir, 'Bin%s' % arch_subdir)]
+            tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)]
 
         if self.vs_ver == 14.0:
             path = 'Bin%s' % self.pi.current_dir(hidex86=True)
-            tools += [join(si.VCInstallDir, path)]
+            tools += [os.path.join(si.VCInstallDir, path)]
 
         elif self.vs_ver >= 15.0:
             host_dir = (
                 r'bin\HostX86%s' if self.pi.current_is_x86() else r'bin\HostX64%s'
             )
-            tools += [join(si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))]
+            tools += [
+                os.path.join(si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))
+            ]
 
             if self.pi.current_cpu != self.pi.target_cpu:
                 tools += [
-                    join(si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))
+                    os.path.join(
+                        si.VCInstallDir, host_dir % self.pi.current_dir(x64=True)
+                    )
                 ]
 
         else:
-            tools += [join(si.VCInstallDir, 'Bin')]
+            tools += [os.path.join(si.VCInstallDir, 'Bin')]
 
         return tools
 
@@ -1295,13 +1057,13 @@ def OSLibraries(self):
         """
         if self.vs_ver <= 10.0:
             arch_subdir = self.pi.target_dir(hidex86=True, x64=True)
-            return [join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]
+            return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]
 
         else:
             arch_subdir = self.pi.target_dir(x64=True)
-            lib = join(self.si.WindowsSdkDir, 'lib')
+            lib = os.path.join(self.si.WindowsSdkDir, 'lib')
             libver = self._sdk_subdir
-            return [join(lib, '%sum%s' % (libver, arch_subdir))]
+            return [os.path.join(lib, '%sum%s' % (libver, arch_subdir))]
 
     @property
     def OSIncludes(self):
@@ -1313,10 +1075,10 @@ def OSIncludes(self):
         list of str
             paths
         """
-        include = join(self.si.WindowsSdkDir, 'include')
+        include = os.path.join(self.si.WindowsSdkDir, 'include')
 
         if self.vs_ver <= 10.0:
-            return [include, join(include, 'gl')]
+            return [include, os.path.join(include, 'gl')]
 
         else:
             if self.vs_ver >= 14.0:
@@ -1324,9 +1086,9 @@ def OSIncludes(self):
             else:
                 sdkver = ''
             return [
-                join(include, '%sshared' % sdkver),
-                join(include, '%sum' % sdkver),
-                join(include, '%swinrt' % sdkver),
+                os.path.join(include, '%sshared' % sdkver),
+                os.path.join(include, '%sum' % sdkver),
+                os.path.join(include, '%swinrt' % sdkver),
             ]
 
     @property
@@ -1339,23 +1101,25 @@ def OSLibpath(self):
         list of str
             paths
         """
-        ref = join(self.si.WindowsSdkDir, 'References')
+        ref = os.path.join(self.si.WindowsSdkDir, 'References')
         libpath = []
 
         if self.vs_ver <= 9.0:
             libpath += self.OSLibraries
 
         if self.vs_ver >= 11.0:
-            libpath += [join(ref, r'CommonConfiguration\Neutral')]
+            libpath += [os.path.join(ref, r'CommonConfiguration\Neutral')]
 
         if self.vs_ver >= 14.0:
             libpath += [
                 ref,
-                join(self.si.WindowsSdkDir, 'UnionMetadata'),
-                join(ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'),
-                join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'),
-                join(ref, 'Windows.Networking.Connectivity.WwanContract', '1.0.0.0'),
-                join(
+                os.path.join(self.si.WindowsSdkDir, 'UnionMetadata'),
+                os.path.join(ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'),
+                os.path.join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'),
+                os.path.join(
+                    ref, 'Windows.Networking.Connectivity.WwanContract', '1.0.0.0'
+                ),
+                os.path.join(
                     self.si.WindowsSdkDir,
                     'ExtensionSDKs',
                     'Microsoft.VCLibs',
@@ -1390,12 +1154,12 @@ def _sdk_tools(self):
         """
         if self.vs_ver < 15.0:
             bin_dir = 'Bin' if self.vs_ver <= 11.0 else r'Bin\x86'
-            yield join(self.si.WindowsSdkDir, bin_dir)
+            yield os.path.join(self.si.WindowsSdkDir, bin_dir)
 
         if not self.pi.current_is_x86():
             arch_subdir = self.pi.current_dir(x64=True)
             path = 'Bin%s' % arch_subdir
-            yield join(self.si.WindowsSdkDir, path)
+            yield os.path.join(self.si.WindowsSdkDir, path)
 
         if self.vs_ver in (10.0, 11.0):
             if self.pi.target_is_x86():
@@ -1403,13 +1167,13 @@ def _sdk_tools(self):
             else:
                 arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
             path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir
-            yield join(self.si.WindowsSdkDir, path)
+            yield os.path.join(self.si.WindowsSdkDir, path)
 
         elif self.vs_ver >= 15.0:
-            path = join(self.si.WindowsSdkDir, 'Bin')
+            path = os.path.join(self.si.WindowsSdkDir, 'Bin')
             arch_subdir = self.pi.current_dir(x64=True)
             sdkver = self.si.WindowsSdkLastVersion
-            yield join(path, '%s%s' % (sdkver, arch_subdir))
+            yield os.path.join(path, '%s%s' % (sdkver, arch_subdir))
 
         if self.si.WindowsSDKExecutablePath:
             yield self.si.WindowsSDKExecutablePath
@@ -1440,7 +1204,7 @@ def SdkSetup(self):
         if self.vs_ver > 9.0:
             return []
 
-        return [join(self.si.WindowsSdkDir, 'Setup')]
+        return [os.path.join(self.si.WindowsSdkDir, 'Setup')]
 
     @property
     def FxTools(self):
@@ -1464,9 +1228,13 @@ def FxTools(self):
 
         tools = []
         if include32:
-            tools += [join(si.FrameworkDir32, ver) for ver in si.FrameworkVersion32]
+            tools += [
+                os.path.join(si.FrameworkDir32, ver) for ver in si.FrameworkVersion32
+            ]
         if include64:
-            tools += [join(si.FrameworkDir64, ver) for ver in si.FrameworkVersion64]
+            tools += [
+                os.path.join(si.FrameworkDir64, ver) for ver in si.FrameworkVersion64
+            ]
         return tools
 
     @property
@@ -1483,7 +1251,7 @@ def NetFxSDKLibraries(self):
             return []
 
         arch_subdir = self.pi.target_dir(x64=True)
-        return [join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]
+        return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]
 
     @property
     def NetFxSDKIncludes(self):
@@ -1498,7 +1266,7 @@ def NetFxSDKIncludes(self):
         if self.vs_ver < 14.0 or not self.si.NetFxSdkDir:
             return []
 
-        return [join(self.si.NetFxSdkDir, r'include\um')]
+        return [os.path.join(self.si.NetFxSdkDir, r'include\um')]
 
     @property
     def VsTDb(self):
@@ -1510,7 +1278,7 @@ def VsTDb(self):
         list of str
             paths
         """
-        return [join(self.si.VSInstallDir, r'VSTSDB\Deploy')]
+        return [os.path.join(self.si.VSInstallDir, r'VSTSDB\Deploy')]
 
     @property
     def MSBuild(self):
@@ -1532,11 +1300,11 @@ def MSBuild(self):
             arch_subdir = ''
 
         path = r'MSBuild\%0.1f\bin%s' % (self.vs_ver, arch_subdir)
-        build = [join(base_path, path)]
+        build = [os.path.join(base_path, path)]
 
         if self.vs_ver >= 15.0:
             # Add Roslyn C# & Visual Basic Compiler
-            build += [join(base_path, path, 'Roslyn')]
+            build += [os.path.join(base_path, path, 'Roslyn')]
 
         return build
 
@@ -1553,7 +1321,7 @@ def HTMLHelpWorkshop(self):
         if self.vs_ver < 11.0:
             return []
 
-        return [join(self.si.ProgramFilesx86, 'HTML Help Workshop')]
+        return [os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')]
 
     @property
     def UCRTLibraries(self):
@@ -1569,9 +1337,9 @@ def UCRTLibraries(self):
             return []
 
         arch_subdir = self.pi.target_dir(x64=True)
-        lib = join(self.si.UniversalCRTSdkDir, 'lib')
+        lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib')
         ucrtver = self._ucrt_subdir
-        return [join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
+        return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
 
     @property
     def UCRTIncludes(self):
@@ -1586,8 +1354,8 @@ def UCRTIncludes(self):
         if self.vs_ver < 14.0:
             return []
 
-        include = join(self.si.UniversalCRTSdkDir, 'include')
-        return [join(include, '%sucrt' % self._ucrt_subdir)]
+        include = os.path.join(self.si.UniversalCRTSdkDir, 'include')
+        return [os.path.join(include, '%sucrt' % self._ucrt_subdir)]
 
     @property
     def _ucrt_subdir(self):
@@ -1618,14 +1386,11 @@ def FSharp(self):
         return [self.si.FSharpInstallDir]
 
     @property
-    def VCRuntimeRedist(self):
+    def VCRuntimeRedist(self) -> str | None:
         """
         Microsoft Visual C++ runtime redistributable dll.
 
-        Return
-        ------
-        str
-            path
+        Returns the first suitable path found or None.
         """
         vcruntime = 'vcruntime%d0.dll' % self.vc_ver
         arch_subdir = self.pi.target_dir(x64=True).strip('\\')
@@ -1633,13 +1398,13 @@ def VCRuntimeRedist(self):
         # Installation prefixes candidates
         prefixes = []
         tools_path = self.si.VCInstallDir
-        redist_path = dirname(tools_path.replace(r'\Tools', r'\Redist'))
-        if isdir(redist_path):
+        redist_path = os.path.dirname(tools_path.replace(r'\Tools', r'\Redist'))
+        if os.path.isdir(redist_path):
             # Redist version may not be exactly the same as tools
-            redist_path = join(redist_path, listdir(redist_path)[-1])
-            prefixes += [redist_path, join(redist_path, 'onecore')]
+            redist_path = os.path.join(redist_path, os.listdir(redist_path)[-1])
+            prefixes += [redist_path, os.path.join(redist_path, 'onecore')]
 
-        prefixes += [join(tools_path, 'redist')]  # VS14 legacy path
+        prefixes += [os.path.join(tools_path, 'redist')]  # VS14 legacy path
 
         # CRT directory
         crt_dirs = (
@@ -1649,11 +1414,11 @@ def VCRuntimeRedist(self):
         )
 
         # vcruntime path
-        for prefix, crt_dir in itertools.product(prefixes, crt_dirs):
-            path = join(prefix, arch_subdir, crt_dir, vcruntime)
-            if isfile(path):
-                return path
-        return None
+        candidate_paths = (
+            os.path.join(prefix, arch_subdir, crt_dir, vcruntime)
+            for (prefix, crt_dir) in itertools.product(prefixes, crt_dirs)
+        )
+        return next(filter(os.path.isfile, candidate_paths), None)
 
     def return_env(self, exists=True):
         """
@@ -1712,7 +1477,7 @@ def return_env(self, exists=True):
                 exists,
             ),
         )
-        if self.vs_ver >= 14 and isfile(self.VCRuntimeRedist):
+        if self.vs_ver >= 14 and self.VCRuntimeRedist:
             env['py_vcruntime_redist'] = self.VCRuntimeRedist
         return env
 
@@ -1740,11 +1505,11 @@ def _build_paths(self, name, spec_path_lists, exists):
         """
         # flatten spec_path_lists
         spec_paths = itertools.chain.from_iterable(spec_path_lists)
-        env_paths = environ.get(name, '').split(pathsep)
+        env_paths = environ.get(name, '').split(os.pathsep)
         paths = itertools.chain(spec_paths, env_paths)
-        extant_paths = list(filter(isdir, paths)) if exists else paths
+        extant_paths = list(filter(os.path.isdir, paths)) if exists else paths
         if not extant_paths:
             msg = "%s environment variable is empty" % name.upper()
             raise distutils.errors.DistutilsPlatformError(msg)
         unique_paths = unique_everseen(extant_paths)
-        return pathsep.join(unique_paths)
+        return os.pathsep.join(unique_paths)
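
With the msvc14 backports gone, the module's public surface is the ``EnvironmentInfo`` machinery the new docstring demonstrates. A hedged usage sketch (Windows only; treat the printed key as an assumption based on ``return_env`` above):

    # Windows-only sketch mirroring the new module docstring.
    from setuptools.msvc import EnvironmentInfo

    ei = EnvironmentInfo('amd64')
    env = ei.return_env()
    # 'py_vcruntime_redist' is only set when VCRuntimeRedist resolves a
    # vcruntime DLL via next(filter(os.path.isfile, ...), None), which is
    # also what the v74.1.1 TypeError fix relies on.
    print(env.get('py_vcruntime_redist'))
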
diff --git a/setuptools/namespaces.py b/setuptools/namespaces.py
index e82439e3ef..299fdd9479 100644
--- a/setuptools/namespaces.py
+++ b/setuptools/namespaces.py
@@ -1,7 +1,7 @@
 import itertools
 import os
 
-from .compat import py39
+from .compat import py312
 
 from distutils import log
 
@@ -25,8 +25,9 @@ def install_namespaces(self):
             list(lines)
             return
 
-        with open(filename, 'wt', encoding=py39.LOCALE_ENCODING) as f:
-            # Requires encoding="locale" instead of "utf-8" (python/cpython#77102).
+        with open(filename, 'wt', encoding=py312.PTH_ENCODING) as f:
+            # Python<3.13 requires encoding="locale" instead of "utf-8"
+            # See: python/cpython#77102
             f.writelines(lines)
 
     def uninstall_namespaces(self):
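
For context, ``py312.PTH_ENCODING`` is a small compatibility shim. A plausible sketch, assuming the switch happens at the 3.13 boundary described in the comment above (this is not the verbatim ``setuptools.compat.py312`` source):

    import sys

    # .pth files are read as UTF-8 from Python 3.13 onwards; before that
    # the interpreter uses the locale encoding (python/cpython#77102).
    # Note: encoding='locale' itself requires Python >= 3.10.
    PTH_ENCODING = 'utf-8' if sys.version_info >= (3, 13) else 'locale'
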
diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py
index 78959b6454..deee6fa47c 100644
--- a/setuptools/tests/config/test_apply_pyprojecttoml.py
+++ b/setuptools/tests/config/test_apply_pyprojecttoml.py
@@ -324,6 +324,27 @@ def test_invalid_module_name(self, tmp_path, monkeypatch, module):
             self.dist(module).py_modules
 
 
+class TestExtModules:
+    def test_pyproject_sets_attribute(self, tmp_path, monkeypatch):
+        monkeypatch.chdir(tmp_path)
+        pyproject = Path("pyproject.toml")
+        toml_config = """
+        [project]
+        name = "test"
+        version = "42.0"
+        [tool.setuptools]
+        ext-modules = [
+          {name = "my.ext", sources = ["hello.c", "world.c"]}
+        ]
+        """
+        pyproject.write_text(cleandoc(toml_config), encoding="utf-8")
+        with pytest.warns(pyprojecttoml._ExperimentalConfiguration):
+            dist = pyprojecttoml.apply_configuration(Distribution({}), pyproject)
+        assert len(dist.ext_modules) == 1
+        assert dist.ext_modules[0].name == "my.ext"
+        assert set(dist.ext_modules[0].sources) == {"hello.c", "world.c"}
+
+
 class TestDeprecatedFields:
     def test_namespace_packages(self, tmp_path):
         pyproject = tmp_path / "pyproject.toml"
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index ca6af9667e..de257db80c 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -1332,6 +1332,16 @@ def test_from_simple_string_uses_shlex(self):
         assert len(cmd) == 2
         assert '"' not in cmd.as_header()
 
+    def test_from_param_raises_expected_error(self) -> None:
+        """
+        from_param should raise its own TypeError when the argument's type is unsupported
+        """
+        with pytest.raises(TypeError) as exc_info:
+            ei.CommandSpec.from_param(object())  # type: ignore[arg-type] # We want a type error here
+        assert (
+            str(exc_info.value) == "Argument has an unsupported type <class 'object'>"
+        ), exc_info.value
+
 
 class TestWindowsScriptWriter:
     def test_header(self):
diff --git a/setuptools/tests/test_integration.py b/setuptools/tests/test_integration.py
deleted file mode 100644
index b0e7d67b5e..0000000000
--- a/setuptools/tests/test_integration.py
+++ /dev/null
@@ -1,121 +0,0 @@
-"""Run some integration tests.
-
-Try to install a few packages.
-"""
-
-import glob
-import os
-import sys
-import urllib.request
-
-import pytest
-
-from setuptools.command import easy_install as easy_install_pkg
-from setuptools.command.easy_install import easy_install
-from setuptools.dist import Distribution
-
-pytestmark = pytest.mark.skipif(
-    'platform.python_implementation() == "PyPy" and platform.system() == "Windows"',
-    reason="pypa/setuptools#2496",
-)
-
-
-def setup_module(module):
-    packages = 'stevedore', 'virtualenvwrapper', 'pbr', 'novaclient'
-    for pkg in packages:
-        try:
-            __import__(pkg)
-            tmpl = "Integration tests cannot run when {pkg} is installed"
-            pytest.skip(tmpl.format(**locals()))
-        except ImportError:
-            pass
-
-    try:
-        urllib.request.urlopen('https://pypi.python.org/pypi')
-    except Exception as exc:
-        pytest.skip(str(exc))
-
-
-@pytest.fixture
-def install_context(request, tmpdir, monkeypatch):
-    """Fixture to set up temporary installation directory."""
-    # Save old values so we can restore them.
-    new_cwd = tmpdir.mkdir('cwd')
-    user_base = tmpdir.mkdir('user_base')
-    user_site = tmpdir.mkdir('user_site')
-    install_dir = tmpdir.mkdir('install_dir')
-
-    def fin():
-        # undo the monkeypatch, particularly needed under
-        # windows because of kept handle on cwd
-        monkeypatch.undo()
-        new_cwd.remove()
-        user_base.remove()
-        user_site.remove()
-        install_dir.remove()
-
-    request.addfinalizer(fin)
-
-    # Change the environment and site settings to control where the
-    # files are installed and ensure we do not overwrite anything.
-    monkeypatch.chdir(new_cwd)
-    monkeypatch.setattr(easy_install_pkg, '__file__', user_site.strpath)
-    monkeypatch.setattr('site.USER_BASE', user_base.strpath)
-    monkeypatch.setattr('site.USER_SITE', user_site.strpath)
-    monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath])
-    monkeypatch.setenv('PYTHONPATH', str(os.path.pathsep.join(sys.path)))
-
-    # Set up the command for performing the installation.
-    dist = Distribution()
-    cmd = easy_install(dist)
-    cmd.install_dir = install_dir.strpath
-    return cmd
-
-
-def _install_one(requirement, cmd, pkgname, modulename):
-    cmd.args = [requirement]
-    cmd.ensure_finalized()
-    cmd.run()
-    target = cmd.install_dir
-    dest_path = glob.glob(os.path.join(target, pkgname + '*.egg'))
-    assert dest_path
-    assert os.path.exists(os.path.join(dest_path[0], pkgname, modulename))
-
-
-def test_stevedore(install_context):
-    _install_one('stevedore', install_context, 'stevedore', 'extension.py')
-
-
-@pytest.mark.xfail
-def test_virtualenvwrapper(install_context):
-    _install_one(
-        'virtualenvwrapper', install_context, 'virtualenvwrapper', 'hook_loader.py'
-    )
-
-
-def test_pbr(install_context):
-    _install_one('pbr', install_context, 'pbr', 'core.py')
-
-
-@pytest.mark.xfail
-@pytest.mark.filterwarnings("ignore:'encoding' argument not specified")
-# ^-- Dependency chain: `python-novaclient` < `oslo-utils` < `netifaces==0.11.0`
-#     netifaces' setup.py uses `open` without `encoding="utf-8"` which is hijacked by
-#     `setuptools.sandbox._open` and triggers the EncodingWarning.
-#     Can't use EncodingWarning in the filter, as it does not exist on Python < 3.10.
-def test_python_novaclient(install_context):
-    _install_one('python-novaclient', install_context, 'novaclient', 'base.py')
-
-
-def test_pyuri(install_context):
-    """
-    Install the pyuri package (version 0.3.1 at the time of writing).
-
-    This is also a regression test for issue #1016.
-    """
-    _install_one('pyuri', install_context, 'pyuri', 'uri.py')
-
-    pyuri = install_context.installed_projects['pyuri']
-
-    # The package data should be installed.
-    assert os.path.exists(os.path.join(pyuri.location, 'pyuri', 'uri.regex'))
diff --git a/setuptools/tests/test_msvc14.py b/setuptools/tests/test_msvc14.py
deleted file mode 100644
index 57d3cc38e8..0000000000
--- a/setuptools/tests/test_msvc14.py
+++ /dev/null
@@ -1,85 +0,0 @@
-"""
-Tests for msvc support module (msvc14 unit tests).
-"""
-
-import os
-import sys
-
-import pytest
-
-from distutils.errors import DistutilsPlatformError
-
-
-@pytest.mark.skipif(sys.platform != "win32", reason="These tests are only for win32")
-class TestMSVC14:
-    """Python 3.8 "distutils/tests/test_msvccompiler.py" backport"""
-
-    def test_no_compiler(self):
-        import setuptools.msvc as _msvccompiler
-
-        # makes sure query_vcvarsall raises
-        # a DistutilsPlatformError if the compiler
-        # is not found
-
-        def _find_vcvarsall(plat_spec):
-            return None, None
-
-        old_find_vcvarsall = _msvccompiler._msvc14_find_vcvarsall
-        _msvccompiler._msvc14_find_vcvarsall = _find_vcvarsall
-        try:
-            pytest.raises(
-                DistutilsPlatformError,
-                _msvccompiler._msvc14_get_vc_env,
-                'wont find this version',
-            )
-        finally:
-            _msvccompiler._msvc14_find_vcvarsall = old_find_vcvarsall
-
-    def test_get_vc_env_unicode(self):
-        import setuptools.msvc as _msvccompiler
-
-        test_var = 'ṰḖṤṪ┅ṼẨṜ'
-        test_value = '₃⁴₅'
-
-        # Ensure we don't early exit from _get_vc_env
-        old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None)
-        os.environ[test_var] = test_value
-        try:
-            env = _msvccompiler._msvc14_get_vc_env('x86')
-            assert test_var.lower() in env
-            assert test_value == env[test_var.lower()]
-        finally:
-            os.environ.pop(test_var)
-            if old_distutils_use_sdk:
-                os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk
-
-    def test_get_vc2017(self):
-        import setuptools.msvc as _msvccompiler
-
-        # This function cannot be mocked, so pass it if we find VS 2017
-        # and mark it skipped if we do not.
-        version, path = _msvccompiler._msvc14_find_vc2017()
-        if os.environ.get('APPVEYOR_BUILD_WORKER_IMAGE', '') == 'Visual Studio 2017':
-            assert version
-        if version:
-            assert version >= 15
-            assert os.path.isdir(path)
-        else:
-            pytest.skip("VS 2017 is not installed")
-
-    def test_get_vc2015(self):
-        import setuptools.msvc as _msvccompiler
-
-        # This function cannot be mocked, so pass it if we find VS 2015
-        # and mark it skipped if we do not.
-        version, path = _msvccompiler._msvc14_find_vc2015()
-        if os.environ.get('APPVEYOR_BUILD_WORKER_IMAGE', '') in [
-            'Visual Studio 2015',
-            'Visual Studio 2017',
-        ]:
-            assert version
-        if version:
-            assert version >= 14
-            assert os.path.isdir(path)
-        else:
-            pytest.skip("VS 2015 is not installed")
diff --git a/setuptools/tests/test_register.py b/setuptools/tests/test_register.py
deleted file mode 100644
index 0c7d109d31..0000000000
--- a/setuptools/tests/test_register.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from unittest import mock
-
-import pytest
-
-from setuptools.command.register import register
-from setuptools.dist import Distribution
-from setuptools.errors import RemovedCommandError
-
-
-class TestRegister:
-    def test_register_exception(self):
-        """Ensure that the register command has been properly removed."""
-        dist = Distribution()
-        dist.dist_files = [(mock.Mock(), mock.Mock(), mock.Mock())]
-
-        cmd = register(dist)
-
-        with pytest.raises(RemovedCommandError):
-            cmd.run()
diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py
index 7fc9dae872..30347190db 100644
--- a/setuptools/tests/test_sdist.py
+++ b/setuptools/tests/test_sdist.py
@@ -4,11 +4,13 @@
 import io
 import logging
 import os
+import pathlib
 import sys
 import tarfile
 import tempfile
 import unicodedata
 from inspect import cleandoc
+from pathlib import Path
 from unittest import mock
 
 import jaraco.path
@@ -425,6 +427,46 @@ def test_defaults_case_sensitivity(self, source_dir):
         assert 'setup.py' not in manifest, manifest
         assert 'setup.cfg' not in manifest, manifest
 
+    def test_exclude_dev_only_cache_folders(self, source_dir):
+        included = {
+            # Emulate problem in https://github.com/pypa/setuptools/issues/4601
+            "MANIFEST.in": (
+                "global-include LICEN[CS]E* COPYING* NOTICE* AUTHORS*\n"
+                "global-include *.txt\n"
+            ),
+            # For the sake of being conservative and limiting unforeseen side-effects
+            # we just exclude dev-only cache folders at the root of the repository:
+            "test/.venv/lib/python3.9/site-packages/bar-2.dist-info/AUTHORS.rst": "",
+            "src/.nox/py/lib/python3.12/site-packages/bar-2.dist-info/COPYING.txt": "",
+            "doc/.tox/default/lib/python3.11/site-packages/foo-4.dist-info/LICENSE": "",
+            # Let's test against false positives with similarly named files:
+            ".venv-requirements.txt": "",
+            ".tox-coveragerc.txt": "",
+            ".noxy/coveragerc.txt": "",
+        }
+
+        excluded = {
+            # .tox/.nox/.venv are well-known folders present at the root of Python repos
+            # and therefore should be excluded
+            ".tox/release/lib/python3.11/site-packages/foo-4.dist-info/LICENSE": "",
+            ".nox/py/lib/python3.12/site-packages/bar-2.dist-info/COPYING.txt": "",
+            ".venv/lib/python3.9/site-packages/bar-2.dist-info/AUTHORS.rst": "",
+        }
+
+        for file, content in {**excluded, **included}.items():
+            Path(source_dir, file).parent.mkdir(parents=True, exist_ok=True)
+            Path(source_dir, file).write_text(content, encoding="utf-8")
+
+        cmd = self.setup_with_extension()
+        self.assert_package_data_in_manifest(cmd)
+        manifest = {f.replace(os.sep, '/') for f in cmd.filelist.files}
+        for path in excluded:
+            assert os.path.exists(path)
+            assert path not in manifest, (path, manifest)
+        for path in included:
+            assert os.path.exists(path)
+            assert path in manifest, (path, manifest)
+
     @fail_on_ascii
     def test_manifest_is_written_with_utf8_encoding(self):
         # Test for #303.
@@ -781,6 +823,21 @@ def get_source_files(self):
         manifest = cmd.filelist.files
         assert '.myfile~' in manifest
 
+    @pytest.mark.skipif("os.environ.get('SETUPTOOLS_USE_DISTUTILS') == 'stdlib'")
+    def test_build_base_pathlib(self, source_dir):
+        """
+        Ensure that the build still succeeds if build_base is a pathlib.Path.
+        """
+        dist = Distribution({
+            **SETUP_ATTRS,
+            "script_name": "setup.py",
+            "options": {"build": {"build_base": pathlib.Path('build')}},
+        })
+        cmd = sdist(dist)
+        cmd.ensure_finalized()
+        with quiet():
+            cmd.run()
+
 
 def test_default_revctrl():
     """
@@ -915,4 +972,4 @@ def test_sanity_check_setuptools_own_sdist(setuptools_sdist):
 
     # setuptools sdist should not include the .tox folder
     tox_files = [name for name in files if ".tox" in name]
-    assert len(tox_files) == 0
+    assert len(tox_files) == 0, f"not empty {tox_files}"
diff --git a/setuptools/tests/test_upload.py b/setuptools/tests/test_upload.py
deleted file mode 100644
index cbcd455c41..0000000000
--- a/setuptools/tests/test_upload.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from unittest import mock
-
-import pytest
-
-from setuptools.command.upload import upload
-from setuptools.dist import Distribution
-from setuptools.errors import RemovedCommandError
-
-
-class TestUpload:
-    def test_upload_exception(self):
-        """Ensure that the register command has been properly removed."""
-        dist = Distribution()
-        dist.dist_files = [(mock.Mock(), mock.Mock(), mock.Mock())]
-
-        cmd = upload(dist)
-
-        with pytest.raises(RemovedCommandError):
-            cmd.run()
diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py
index cdc10f5004..b02949baf9 100644
--- a/setuptools/tests/test_virtualenv.py
+++ b/setuptools/tests/test_virtualenv.py
@@ -110,5 +110,4 @@ def test_no_missing_dependencies(bare_venv, request):
     Quick and dirty test to ensure all external dependencies are vendored.
     """
     setuptools_dir = request.config.rootdir
-    for command in ('upload',):  # sorted(distutils.command.__all__):
-        bare_venv.run(['python', 'setup.py', command, '-h'], cwd=setuptools_dir)
+    bare_venv.run(['python', 'setup.py', '--help'], cwd=setuptools_dir)
diff --git a/tools/ppc64le-patch.py b/tools/ppc64le-patch.py
deleted file mode 100644
index a0b04ce502..0000000000
--- a/tools/ppc64le-patch.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""
-Except on bionic, Travis Linux base image for PPC64LE
-platform lacks the proper
-permissions to the directory ~/.cache/pip/wheels that allow
-the user running travis build to install pip packages.
-TODO: is someone tracking this issue? Maybe just move to bionic?
-"""
-
-import collections
-import os
-import subprocess
-
-
-def patch():
-    env = collections.defaultdict(str, os.environ)
-    if env['TRAVIS_CPU_ARCH'] != 'ppc64le':
-        return
-    cmd = [
-        'sudo',
-        'chown',
-        '-Rfv',
-        '{USER}:{GROUP}'.format_map(env),
-        os.path.expanduser('~/.cache/pip/wheels'),
-    ]
-    subprocess.Popen(cmd)
-
-
-__name__ == '__main__' and patch()
diff --git a/tox.ini b/tox.ini
index cbade4ff46..b767cd4fbc 100644
--- a/tox.ini
+++ b/tox.ini
@@ -58,9 +58,7 @@ deps =
 	importlib_resources < 6  # twisted/towncrier#528 (waiting for release)
 commands =
 	python -m sphinx -W --keep-going . {toxinidir}/build/html
-	python -m sphinxlint \
-		# workaround for sphinx-contrib/sphinx-lint#83
-		--jobs 1
+	python -m sphinxlint
 
 [testenv:finalize]
 description = assemble changelog and tag a release