Diffstat (limited to 'setuptools')
69 files changed, 8278 insertions, 645 deletions
diff --git a/setuptools/__init__.py b/setuptools/__init__.py index 06991b65..cff04323 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -1,15 +1,15 @@ """Extensions to the 'distutils' for large or complex distributions""" -from fnmatch import fnmatchcase import functools import os import re +import warnings import _distutils_hack.override # noqa: F401 import distutils.core from distutils.errors import DistutilsOptionError -from distutils.util import convert_path +from distutils.util import convert_path as _convert_path from ._deprecation_warning import SetuptoolsDeprecationWarning @@ -17,6 +17,7 @@ import setuptools.version from setuptools.extension import Extension from setuptools.dist import Distribution from setuptools.depends import Require +from setuptools.discovery import PackageFinder, PEP420PackageFinder from . import monkey from . import logging @@ -37,85 +38,6 @@ __version__ = setuptools.version.__version__ bootstrap_install_from = None -class PackageFinder: - """ - Generate a list of all Python packages found within a directory - """ - - @classmethod - def find(cls, where='.', exclude=(), include=('*',)): - """Return a list all Python packages found within directory 'where' - - 'where' is the root directory which will be searched for packages. It - should be supplied as a "cross-platform" (i.e. URL-style) path; it will - be converted to the appropriate local path syntax. - - 'exclude' is a sequence of package names to exclude; '*' can be used - as a wildcard in the names, such that 'foo.*' will exclude all - subpackages of 'foo' (but not 'foo' itself). - - 'include' is a sequence of package names to include. If it's - specified, only the named packages will be included. If it's not - specified, all found packages will be included. 'include' can contain - shell style wildcard patterns just like 'exclude'. - """ - - return list( - cls._find_packages_iter( - convert_path(where), - cls._build_filter('ez_setup', '*__pycache__', *exclude), - cls._build_filter(*include), - ) - ) - - @classmethod - def _find_packages_iter(cls, where, exclude, include): - """ - All the packages found in 'where' that pass the 'include' filter, but - not the 'exclude' filter. - """ - for root, dirs, files in os.walk(where, followlinks=True): - # Copy dirs to iterate over it, then empty dirs. - all_dirs = dirs[:] - dirs[:] = [] - - for dir in all_dirs: - full_path = os.path.join(root, dir) - rel_path = os.path.relpath(full_path, where) - package = rel_path.replace(os.path.sep, '.') - - # Skip directory trees that are not valid packages - if '.' in dir or not cls._looks_like_package(full_path): - continue - - # Should this package be included? - if include(package) and not exclude(package): - yield package - - # Keep searching subdirectories, as there may be more packages - # down there, even if the parent was excluded. - dirs.append(dir) - - @staticmethod - def _looks_like_package(path): - """Does a directory look like a package?""" - return os.path.isfile(os.path.join(path, '__init__.py')) - - @staticmethod - def _build_filter(*patterns): - """ - Given a list of patterns, return a callable that will be true only if - the input matches at least one of the patterns. 
- """ - return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) - - -class PEP420PackageFinder(PackageFinder): - @staticmethod - def _looks_like_package(path): - return True - - find_packages = PackageFinder.find find_namespace_packages = PEP420PackageFinder.find @@ -133,6 +55,16 @@ def _install_setup_requires(attrs): _incl = 'dependency_links', 'setup_requires' filtered = {k: attrs[k] for k in set(_incl) & set(attrs)} super().__init__(filtered) + # Prevent accidentally triggering discovery with incomplete set of attrs + self.set_defaults._disable() + + def _get_project_config_files(self, filenames=None): + """Ignore ``pyproject.toml``, they are not related to setup_requires""" + try: + cfg, toml = super()._split_standard_project_metadata(filenames) + return cfg, () + except Exception: + return filenames, () def finalize_options(self): """ @@ -236,6 +168,19 @@ def findall(dir=os.curdir): return list(files) +@functools.wraps(_convert_path) +def convert_path(pathname): + from inspect import cleandoc + + msg = """ + The function `convert_path` is considered internal and not part of the public API. + Its direct usage by 3rd-party packages is considered deprecated and the function + may be removed in the future. + """ + warnings.warn(cleandoc(msg), SetuptoolsDeprecationWarning) + return _convert_path(pathname) + + class sic(str): """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)""" diff --git a/setuptools/_distutils/_macos_compat.py b/setuptools/_distutils/_macos_compat.py new file mode 100644 index 00000000..17769e91 --- /dev/null +++ b/setuptools/_distutils/_macos_compat.py @@ -0,0 +1,12 @@ +import sys +import importlib + + +def bypass_compiler_fixup(cmd, args): + return cmd + + +if sys.platform == 'darwin': + compiler_fixup = importlib.import_module('_osx_support').compiler_fixup +else: + compiler_fixup = bypass_compiler_fixup diff --git a/setuptools/_distutils/command/build_scripts.py b/setuptools/_distutils/command/build_scripts.py index e3312cf0..e56511da 100644 --- a/setuptools/_distutils/command/build_scripts.py +++ b/setuptools/_distutils/command/build_scripts.py @@ -2,7 +2,8 @@ Implements the Distutils 'build_scripts' command.""" -import os, re +import os +import re from stat import ST_MODE from distutils import sysconfig from distutils.core import Command @@ -11,8 +12,14 @@ from distutils.util import convert_path from distutils import log import tokenize -# check if Python is called on the first line with this expression -first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$') +shebang_pattern = re.compile('^#!.*python[0-9.]*([ \t].*)?$') +""" +Pattern matching a Python interpreter indicated in first line of a script. +""" + +# for Setuptools compatibility +first_line_re = shebang_pattern + class build_scripts(Command): @@ -26,13 +33,11 @@ class build_scripts(Command): boolean_options = ['force'] - def initialize_options(self): self.build_dir = None self.scripts = None self.force = None self.executable = None - self.outfiles = None def finalize_options(self): self.set_undefined_options('build', @@ -49,104 +54,117 @@ class build_scripts(Command): return self.copy_scripts() - def copy_scripts(self): - r"""Copy each script listed in 'self.scripts'; if it's marked as a - Python script in the Unix way (first line matches 'first_line_re', - ie. starts with "\#!" and contains "python"), then adjust the first - line to refer to the current Python interpreter as we copy. + """ + Copy each script listed in ``self.scripts``. 
+ + If a script is marked as a Python script (first line matches + 'shebang_pattern', i.e. starts with ``#!`` and contains + "python"), then adjust in the copy the first line to refer to + the current Python interpreter. """ self.mkpath(self.build_dir) outfiles = [] updated_files = [] for script in self.scripts: - adjust = False - script = convert_path(script) - outfile = os.path.join(self.build_dir, os.path.basename(script)) - outfiles.append(outfile) - - if not self.force and not newer(script, outfile): - log.debug("not copying %s (up-to-date)", script) - continue - - # Always open the file, but ignore failures in dry-run mode -- - # that way, we'll get accurate feedback if we can read the - # script. - try: - f = open(script, "rb") - except OSError: - if not self.dry_run: - raise - f = None - else: - encoding, lines = tokenize.detect_encoding(f.readline) - f.seek(0) - first_line = f.readline() - if not first_line: - self.warn("%s is an empty file (skipping)" % script) - continue - - match = first_line_re.match(first_line) - if match: - adjust = True - post_interp = match.group(1) or b'' - - if adjust: - log.info("copying and adjusting %s -> %s", script, - self.build_dir) - updated_files.append(outfile) - if not self.dry_run: - if not sysconfig.python_build: - executable = self.executable - else: - executable = os.path.join( - sysconfig.get_config_var("BINDIR"), - "python%s%s" % (sysconfig.get_config_var("VERSION"), - sysconfig.get_config_var("EXE"))) - executable = os.fsencode(executable) - shebang = b"#!" + executable + post_interp + b"\n" - # Python parser starts to read a script using UTF-8 until - # it gets a #coding:xxx cookie. The shebang has to be the - # first line of a file, the #coding:xxx cookie cannot be - # written before. So the shebang has to be decodable from - # UTF-8. - try: - shebang.decode('utf-8') - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from utf-8".format(shebang)) - # If the script is encoded to a custom encoding (use a - # #coding:xxx cookie), the shebang has to be decodable from - # the script encoding too. - try: - shebang.decode(encoding) - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from the script encoding ({})" - .format(shebang, encoding)) - with open(outfile, "wb") as outf: - outf.write(shebang) - outf.writelines(f.readlines()) - if f: - f.close() - else: - if f: - f.close() - updated_files.append(outfile) - self.copy_file(script, outfile) - - if os.name == 'posix': - for file in outfiles: - if self.dry_run: - log.info("changing mode of %s", file) - else: - oldmode = os.stat(file)[ST_MODE] & 0o7777 - newmode = (oldmode | 0o555) & 0o7777 - if newmode != oldmode: - log.info("changing mode of %s from %o to %o", - file, oldmode, newmode) - os.chmod(file, newmode) - # XXX should we modify self.outfiles? + self._copy_script(script, outfiles, updated_files) + + self._change_modes(outfiles) + return outfiles, updated_files + + def _copy_script(self, script, outfiles, updated_files): + shebang_match = None + script = convert_path(script) + outfile = os.path.join(self.build_dir, os.path.basename(script)) + outfiles.append(outfile) + + if not self.force and not newer(script, outfile): + log.debug("not copying %s (up-to-date)", script) + return + + # Always open the file, but ignore failures in dry-run mode + # in order to attempt to copy directly. 
+ try: + f = tokenize.open(script) + except OSError: + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: + self.warn("%s is an empty file (skipping)" % script) + return + + shebang_match = shebang_pattern.match(first_line) + + updated_files.append(outfile) + if shebang_match: + log.info("copying and adjusting %s -> %s", script, + self.build_dir) + if not self.dry_run: + if not sysconfig.python_build: + executable = self.executable + else: + executable = os.path.join( + sysconfig.get_config_var("BINDIR"), + "python%s%s" % ( + sysconfig.get_config_var("VERSION"), + sysconfig.get_config_var("EXE"))) + post_interp = shebang_match.group(1) or '' + shebang = "#!" + executable + post_interp + "\n" + self._validate_shebang(shebang, f.encoding) + with open(outfile, "w", encoding=f.encoding) as outf: + outf.write(shebang) + outf.writelines(f.readlines()) + if f: + f.close() + else: + if f: + f.close() + self.copy_file(script, outfile) + + def _change_modes(self, outfiles): + if os.name != 'posix': + return + + for file in outfiles: + self._change_mode(file) + + def _change_mode(self, file): + if self.dry_run: + log.info("changing mode of %s", file) + return + + oldmode = os.stat(file)[ST_MODE] & 0o7777 + newmode = (oldmode | 0o555) & 0o7777 + if newmode != oldmode: + log.info("changing mode of %s from %o to %o", + file, oldmode, newmode) + os.chmod(file, newmode) + + @staticmethod + def _validate_shebang(shebang, encoding): + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be encodable to + # UTF-8. + try: + shebang.encode('utf-8') + except UnicodeEncodeError: + raise ValueError( + "The shebang ({!r}) is not encodable " + "to utf-8".format(shebang)) + + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be encodable to + # the script encoding too. + try: + shebang.encode(encoding) + except UnicodeEncodeError: + raise ValueError( + "The shebang ({!r}) is not encodable " + "to the script encoding ({})" + .format(shebang, encoding)) diff --git a/setuptools/_distutils/command/check.py b/setuptools/_distutils/command/check.py index 525540b6..af311ca9 100644 --- a/setuptools/_distutils/command/check.py +++ b/setuptools/_distutils/command/check.py @@ -2,6 +2,8 @@ Implements the Distutils 'check' command. """ +from email.utils import getaddresses + from distutils.core import Command from distutils.errors import DistutilsSetupError @@ -96,19 +98,39 @@ class check(Command): if missing: self.warn("missing required meta-data: %s" % ', '.join(missing)) - if metadata.author: - if not metadata.author_email: - self.warn("missing meta-data: if 'author' supplied, " + - "'author_email' should be supplied too") - elif metadata.maintainer: - if not metadata.maintainer_email: - self.warn("missing meta-data: if 'maintainer' supplied, " + - "'maintainer_email' should be supplied too") - else: + if not ( + self._check_contact("author", metadata) or + self._check_contact("maintainer", metadata) + ): self.warn("missing meta-data: either (author and author_email) " + "or (maintainer and maintainer_email) " + "should be supplied") + def _check_contact(self, kind, metadata): + """ + Returns True if the contact's name is specified and False otherwise. + This function will warn if the contact's email is not specified. 
+ """ + name = getattr(metadata, kind) or '' + email = getattr(metadata, kind + '_email') or '' + + msg = ("missing meta-data: if '{}' supplied, " + + "'{}' should be supplied too") + + if name and email: + return True + + if name: + self.warn(msg.format(kind, kind + '_email')) + return True + + addresses = [(alias, addr) for alias, addr in getaddresses([email])] + if any(alias and addr for alias, addr in addresses): + # The contact's name can be encoded in the email: `Name <email>` + return True + + return False + def check_restructuredtext(self): """Checks if the long string fields are reST-compliant.""" data = self.distribution.get_long_description() diff --git a/setuptools/_distutils/sysconfig.py b/setuptools/_distutils/sysconfig.py index 4a77a431..9fad3835 100644 --- a/setuptools/_distutils/sysconfig.py +++ b/setuptools/_distutils/sysconfig.py @@ -436,51 +436,6 @@ def expand_makefile_vars(s, vars): _config_vars = None -_sysconfig_name_tmpl = '_sysconfigdata_{abi}_{platform}_{multiarch}' - - -def _init_posix(): - """Initialize the module as appropriate for POSIX systems.""" - # _sysconfigdata is generated at build time, see the sysconfig module - name = os.environ.get( - '_PYTHON_SYSCONFIGDATA_NAME', - _sysconfig_name_tmpl.format( - abi=sys.abiflags, - platform=sys.platform, - multiarch=getattr(sys.implementation, '_multiarch', ''), - ), - ) - try: - _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0) - except ImportError: - # Python 3.5 and pypy 7.3.1 - _temp = __import__( - '_sysconfigdata', globals(), locals(), ['build_time_vars'], 0) - build_time_vars = _temp.build_time_vars - global _config_vars - _config_vars = {} - _config_vars.update(build_time_vars) - - -def _init_nt(): - """Initialize the module as appropriate for NT""" - g = {} - # set basic install directories - g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1) - g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1) - - # XXX hmmm.. a normal install puts include files here - g['INCLUDEPY'] = get_python_inc(plat_specific=0) - - g['EXT_SUFFIX'] = _imp.extension_suffixes()[0] - g['EXE'] = ".exe" - g['VERSION'] = get_python_version().replace(".", "") - g['BINDIR'] = os.path.dirname(os.path.abspath(sys.executable)) - - global _config_vars - _config_vars = g - - def get_config_vars(*args): """With no arguments, return a dictionary of all configuration variables relevant for the current platform. Generally this includes @@ -493,60 +448,7 @@ def get_config_vars(*args): """ global _config_vars if _config_vars is None: - func = globals().get("_init_" + os.name) - if func: - func() - else: - _config_vars = {} - - # Normalized versions of prefix and exec_prefix are handy to have; - # in fact, these are the standard versions used most places in the - # Distutils. - _config_vars['prefix'] = PREFIX - _config_vars['exec_prefix'] = EXEC_PREFIX - - if not IS_PYPY: - # For backward compatibility, see issue19555 - SO = _config_vars.get('EXT_SUFFIX') - if SO is not None: - _config_vars['SO'] = SO - - # Always convert srcdir to an absolute path - srcdir = _config_vars.get('srcdir', project_base) - if os.name == 'posix': - if python_build: - # If srcdir is a relative path (typically '.' or '..') - # then it should be interpreted relative to the directory - # containing Makefile. - base = os.path.dirname(get_makefile_filename()) - srcdir = os.path.join(base, srcdir) - else: - # srcdir is not meaningful since the installation is - # spread about the filesystem. 
We choose the - # directory containing the Makefile since we know it - # exists. - srcdir = os.path.dirname(get_makefile_filename()) - _config_vars['srcdir'] = os.path.abspath(os.path.normpath(srcdir)) - - # Convert srcdir into an absolute path if it appears necessary. - # Normally it is relative to the build directory. However, during - # testing, for example, we might be running a non-installed python - # from a different directory. - if python_build and os.name == "posix": - base = project_base - if (not os.path.isabs(_config_vars['srcdir']) and - base != os.getcwd()): - # srcdir is relative and we are not in the same directory - # as the executable. Assume executable is in the build - # directory and make srcdir absolute. - srcdir = os.path.join(base, _config_vars['srcdir']) - _config_vars['srcdir'] = os.path.normpath(srcdir) - - # OS X platforms require special customization to handle - # multi-architecture, multi-os-version installers - if sys.platform == 'darwin': - import _osx_support - _osx_support.customize_config_vars(_config_vars) + _config_vars = sysconfig.get_config_vars().copy() if args: vals = [] diff --git a/setuptools/_distutils/tests/test_check.py b/setuptools/_distutils/tests/test_check.py index 91bcdceb..b41dba3d 100644 --- a/setuptools/_distutils/tests/test_check.py +++ b/setuptools/_distutils/tests/test_check.py @@ -71,6 +71,28 @@ class CheckTestCase(support.LoggingSilencer, cmd = self._run(metadata) self.assertEqual(cmd._warnings, 0) + def test_check_author_maintainer(self): + for kind in ("author", "maintainer"): + # ensure no warning when author_email or maintainer_email is given + # (the spec allows these fields to take the form "Name <email>") + metadata = {'url': 'xxx', + kind + '_email': 'Name <name@email.com>', + 'name': 'xxx', 'version': 'xxx'} + cmd = self._run(metadata) + self.assertEqual(cmd._warnings, 0) + + # the check should warn if only email is given and it does not + # contain the name + metadata[kind + '_email'] = 'name@email.com' + cmd = self._run(metadata) + self.assertEqual(cmd._warnings, 1) + + # the check should warn if only the name is given + metadata[kind] = "Name" + del metadata[kind + '_email'] + cmd = self._run(metadata) + self.assertEqual(cmd._warnings, 1) + @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") def test_check_document(self): pkg_info, dist = self.create_dist() diff --git a/setuptools/_distutils/tests/test_unixccompiler.py b/setuptools/_distutils/tests/test_unixccompiler.py index 4574f77f..c8b4c149 100644 --- a/setuptools/_distutils/tests/test_unixccompiler.py +++ b/setuptools/_distutils/tests/test_unixccompiler.py @@ -3,6 +3,7 @@ import os import sys import unittest from test.support import run_unittest +from unittest.mock import patch from .py38compat import EnvironmentVarGuard @@ -215,6 +216,42 @@ class UnixCCompilerTestCase(support.TempdirManager, unittest.TestCase): self.assertEqual(self.cc.linker_so[0], 'my_cc') @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") + def test_cc_overrides_ldshared_for_cxx_correctly(self): + """ + Ensure that setting CC env variable also changes default linker + correctly when building C++ extensions. 
+ + pypa/distutils#126 + """ + def gcv(v): + if v == 'LDSHARED': + return 'gcc-4.2 -bundle -undefined dynamic_lookup ' + elif v == 'CXX': + return 'g++-4.2' + return 'gcc-4.2' + + def gcvs(*args, _orig=sysconfig.get_config_vars): + if args: + return list(map(sysconfig.get_config_var, args)) + return _orig() + + sysconfig.get_config_var = gcv + sysconfig.get_config_vars = gcvs + with patch.object(self.cc, 'spawn', return_value=None) as mock_spawn, \ + patch.object(self.cc, '_need_link', return_value=True), \ + patch.object(self.cc, 'mkpath', return_value=None), \ + EnvironmentVarGuard() as env: + env['CC'] = 'ccache my_cc' + env['CXX'] = 'my_cxx' + del env['LDSHARED'] + sysconfig.customize_compiler(self.cc) + self.assertEqual(self.cc.linker_so[0:2], ['ccache', 'my_cc']) + self.cc.link(None, [], 'a.out', target_lang='c++') + call_args = mock_spawn.call_args[0][0] + expected = ['my_cxx', '-bundle', '-undefined', 'dynamic_lookup'] + assert call_args[:4] == expected + + @unittest.skipIf(sys.platform == 'win32', "can't test on Windows") def test_explicit_ldshared(self): # Issue #18080: # ensure that setting CC env variable does not change diff --git a/setuptools/_distutils/unixccompiler.py b/setuptools/_distutils/unixccompiler.py index a07e5988..715408f5 100644 --- a/setuptools/_distutils/unixccompiler.py +++ b/setuptools/_distutils/unixccompiler.py @@ -22,9 +22,7 @@ from distutils.ccompiler import \ from distutils.errors import \ DistutilsExecError, CompileError, LibError, LinkError from distutils import log - -if sys.platform == 'darwin': - import _osx_support +from ._macos_compat import compiler_fixup # XXX Things not currently handled: # * optimization/debug/warning flags; we just use whatever's in Python's @@ -42,6 +40,66 @@ if sys.platform == 'darwin': # options and carry on. +def _split_env(cmd): + """ + For macOS, split command into 'env' portion (if any) + and the rest of the linker command. + + >>> _split_env(['a', 'b', 'c']) + ([], ['a', 'b', 'c']) + >>> _split_env(['/usr/bin/env', 'A=3', 'gcc']) + (['/usr/bin/env', 'A=3'], ['gcc']) + """ + pivot = 0 + if os.path.basename(cmd[0]) == "env": + pivot = 1 + while '=' in cmd[pivot]: + pivot += 1 + return cmd[:pivot], cmd[pivot:] + + +def _split_aix(cmd): + """ + AIX platforms prefix the compiler with the ld_so_aix + script, so split that from the linker command. + + >>> _split_aix(['a', 'b', 'c']) + ([], ['a', 'b', 'c']) + >>> _split_aix(['/bin/foo/ld_so_aix', 'gcc']) + (['/bin/foo/ld_so_aix'], ['gcc']) + """ + pivot = os.path.basename(cmd[0]) == 'ld_so_aix' + return cmd[:pivot], cmd[pivot:] + + +def _linker_params(linker_cmd, compiler_cmd): + """ + The linker command usually begins with the compiler + command (possibly multiple elements), followed by zero or more + params for shared library building. + + If the LDSHARED env variable overrides the linker command, + however, the commands may not match. + + Return the best guess of the linker parameters by stripping + the linker command. If the compiler command does not + match the linker command, assume the linker command is + just the first element. 
+ + >>> _linker_params('gcc foo bar'.split(), ['gcc']) + ['foo', 'bar'] + >>> _linker_params('gcc foo bar'.split(), ['other']) + ['foo', 'bar'] + >>> _linker_params('ccache gcc foo bar'.split(), 'ccache gcc'.split()) + ['foo', 'bar'] + >>> _linker_params(['gcc'], ['gcc']) + [] + """ + c_len = len(compiler_cmd) + pivot = c_len if linker_cmd[:c_len] == compiler_cmd else 1 + return linker_cmd[pivot:] + + class UnixCCompiler(CCompiler): compiler_type = 'unix' @@ -109,10 +167,8 @@ class UnixCCompiler(CCompiler): raise CompileError(msg) def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - compiler_so = self.compiler_so - if sys.platform == 'darwin': - compiler_so = _osx_support.compiler_fixup(compiler_so, - cc_args + extra_postargs) + compiler_so = compiler_fixup( + self.compiler_so, cc_args + extra_postargs) try: self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs) @@ -173,33 +229,22 @@ class UnixCCompiler(CCompiler): ld_args.extend(extra_postargs) self.mkpath(os.path.dirname(output_filename)) try: - if target_desc == CCompiler.EXECUTABLE: - linker = self.linker_exe[:] - else: - linker = self.linker_so[:] + # Select a linker based on context: linker_exe when + # building an executable or linker_so (with shared options) + # when building a shared library. + building_exe = target_desc == CCompiler.EXECUTABLE + linker = (self.linker_exe if building_exe else self.linker_so)[:] + if target_lang == "c++" and self.compiler_cxx: - # skip over environment variable settings if /usr/bin/env - # is used to set up the linker's environment. - # This is needed on OSX. Note: this assumes that the - # normal and C++ compiler have the same environment - # settings. - i = 0 - if os.path.basename(linker[0]) == "env": - i = 1 - while '=' in linker[i]: - i += 1 - - if os.path.basename(linker[i]) == 'ld_so_aix': - # AIX platforms prefix the compiler with the ld_so_aix - # script, so we need to adjust our linker index - offset = 1 - else: - offset = 0 - - linker[i+offset] = self.compiler_cxx[i] - - if sys.platform == 'darwin': - linker = _osx_support.compiler_fixup(linker, ld_args) + env, linker_ne = _split_env(linker) + aix, linker_na = _split_aix(linker_ne) + _, compiler_cxx_ne = _split_env(self.compiler_cxx) + _, linker_exe_ne = _split_env(self.linker_exe) + + params = _linker_params(linker_na, linker_exe_ne) + linker = env + aix + compiler_cxx_ne + params + + linker = compiler_fixup(linker, ld_args) self.spawn(linker + ld_args) except DistutilsExecError as msg: diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/INSTALLER b/setuptools/_vendor/nspektr-0.3.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/LICENSE b/setuptools/_vendor/nspektr-0.3.0.dist-info/LICENSE new file mode 100644 index 00000000..353924be --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. 
Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/METADATA b/setuptools/_vendor/nspektr-0.3.0.dist-info/METADATA new file mode 100644 index 00000000..aadc3749 --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/METADATA @@ -0,0 +1,57 @@ +Metadata-Version: 2.1 +Name: nspektr +Version: 0.3.0 +Summary: package inspector +Home-page: https://github.com/jaraco/nspektr +Author: Jason R. Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: jaraco.context +Requires-Dist: jaraco.functools +Requires-Dist: more-itertools +Requires-Dist: packaging +Requires-Dist: importlib-metadata (>=3.6) ; python_version < "3.10" +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/nspektr.svg + :target: `PyPI link`_ + +.. image:: https://img.shields.io/pypi/pyversions/nspektr.svg + :target: `PyPI link`_ + +.. _PyPI link: https://pypi.org/project/nspektr + +.. image:: https://github.com/jaraco/nspektr/workflows/tests/badge.svg + :target: https://github.com/jaraco/nspektr/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest +.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest + +.. 
image:: https://img.shields.io/badge/skeleton-2022-informational + :target: https://blog.jaraco.com/skeleton + + diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/RECORD b/setuptools/_vendor/nspektr-0.3.0.dist-info/RECORD new file mode 100644 index 00000000..5e5de5eb --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/RECORD @@ -0,0 +1,11 @@ +nspektr-0.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+nspektr-0.3.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+nspektr-0.3.0.dist-info/METADATA,sha256=X0stV4vwFBDBxvzhBl4kAHVdGWPIjEitqAuTJItcQH0,2162
+nspektr-0.3.0.dist-info/RECORD,,
+nspektr-0.3.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+nspektr-0.3.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+nspektr-0.3.0.dist-info/top_level.txt,sha256=uEA20Ixo04XS3wOIt5-Jk5ZuMkBrtlleFipRr8Y1SjQ,8
+nspektr/__init__.py,sha256=d6-d-ZlGAQQP-MEi_NZMiyn2vLbq8Hw3HxICgm3X0Q8,3949
+nspektr/__pycache__/__init__.cpython-310.pyc,,
+nspektr/__pycache__/_compat.cpython-310.pyc,,
+nspektr/_compat.py,sha256=2QoozYhuhgow_NMUATmhoM-yppBV3jiZYQgdiP-ww0s,582
diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/REQUESTED b/setuptools/_vendor/nspektr-0.3.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/REQUESTED diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/WHEEL b/setuptools/_vendor/nspektr-0.3.0.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/setuptools/_vendor/nspektr-0.3.0.dist-info/top_level.txt b/setuptools/_vendor/nspektr-0.3.0.dist-info/top_level.txt new file mode 100644 index 00000000..b10ef50a --- /dev/null +++ b/setuptools/_vendor/nspektr-0.3.0.dist-info/top_level.txt @@ -0,0 +1 @@ +nspektr diff --git a/setuptools/_vendor/nspektr/__init__.py b/setuptools/_vendor/nspektr/__init__.py new file mode 100644 index 00000000..938bbdb9 --- /dev/null +++ b/setuptools/_vendor/nspektr/__init__.py @@ -0,0 +1,145 @@ +import itertools +import functools +import contextlib + +from setuptools.extern.packaging.requirements import Requirement +from setuptools.extern.packaging.version import Version +from setuptools.extern.more_itertools import always_iterable +from setuptools.extern.jaraco.context import suppress +from setuptools.extern.jaraco.functools import apply + +from ._compat import metadata, repair_extras + + +def resolve(req: Requirement) -> metadata.Distribution: + """ + Resolve the requirement to its distribution. + + Ignore exception detail for Python 3.9 compatibility. + + >>> resolve(Requirement('pytest<3')) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + importlib.metadata.PackageNotFoundError: No package metadata was found for pytest<3 + """ + dist = metadata.distribution(req.name) + if not req.specifier.contains(Version(dist.version), prereleases=True): + raise metadata.PackageNotFoundError(str(req)) + dist.extras = req.extras # type: ignore + return dist + + +@apply(bool) +@suppress(metadata.PackageNotFoundError) +def is_satisfied(req: Requirement): + return resolve(req) + + +unsatisfied = functools.partial(itertools.filterfalse, is_satisfied) + + +class NullMarker: + @classmethod + def wrap(cls, req: Requirement): + return req.marker or cls() + + def evaluate(self, *args, **kwargs): + return True + + +def find_direct_dependencies(dist, extras=None): + """ + Find direct, declared dependencies for dist. + """ + simple = ( + req + for req in map(Requirement, always_iterable(dist.requires)) + if NullMarker.wrap(req).evaluate(dict(extra=None)) + ) + extra_deps = ( + req + for req in map(Requirement, always_iterable(dist.requires)) + for extra in always_iterable(getattr(dist, 'extras', extras)) + if NullMarker.wrap(req).evaluate(dict(extra=extra)) + ) + return itertools.chain(simple, extra_deps) + + +def traverse(items, visit): + """ + Given an iterable of items, traverse the items. + + For each item, visit is called to return any additional items + to include in the traversal. + """ + while True: + try: + item = next(items) + except StopIteration: + return + yield item + items = itertools.chain(items, visit(item)) + + +def find_req_dependencies(req): + with contextlib.suppress(metadata.PackageNotFoundError): + dist = resolve(req) + yield from find_direct_dependencies(dist) + + +def find_dependencies(dist, extras=None): + """ + Find all reachable dependencies for dist. 
+ + dist is an importlib.metadata.Distribution (or similar). + TODO: create a suitable protocol for type hint. + + >>> deps = find_dependencies(resolve(Requirement('nspektr'))) + >>> all(isinstance(dep, Requirement) for dep in deps) + True + >>> not any('pytest' in str(dep) for dep in deps) + True + >>> test_deps = find_dependencies(resolve(Requirement('nspektr[testing]'))) + >>> any('pytest' in str(dep) for dep in test_deps) + True + """ + + def visit(req, seen=set()): + if req in seen: + return () + seen.add(req) + return find_req_dependencies(req) + + return traverse(find_direct_dependencies(dist, extras), visit) + + +class Unresolved(Exception): + def __iter__(self): + return iter(self.args[0]) + + +def missing(ep): + """ + Generate the unresolved dependencies (if any) of ep. + """ + return unsatisfied(find_dependencies(ep.dist, repair_extras(ep.extras))) + + +def check(ep): + """ + >>> ep, = metadata.entry_points(group='console_scripts', name='pip') + >>> check(ep) + >>> dist = metadata.distribution('nspektr') + + Since 'docs' extras are not installed, requesting them should fail. + + >>> ep = metadata.EntryPoint( + ... group=None, name=None, value='nspektr [docs]')._for(dist) + >>> check(ep) + Traceback (most recent call last): + ... + nspektr.Unresolved: [...] + """ + missed = list(missing(ep)) + if missed: + raise Unresolved(missed) diff --git a/setuptools/_vendor/nspektr/_compat.py b/setuptools/_vendor/nspektr/_compat.py new file mode 100644 index 00000000..3278379a --- /dev/null +++ b/setuptools/_vendor/nspektr/_compat.py @@ -0,0 +1,21 @@ +import contextlib +import sys + + +if sys.version_info >= (3, 10): + import importlib.metadata as metadata +else: + import setuptools.extern.importlib_metadata as metadata # type: ignore # noqa: F401 + + +def repair_extras(extras): + """ + Repair extras that appear as match objects. + + python/importlib_metadata#369 revealed a flaw in the EntryPoint + implementation. This function wraps the extras to ensure + they are proper strings even on older implementations. + """ + with contextlib.suppress(AttributeError): + return list(item.group(0) for item in extras) + return extras diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/INSTALLER b/setuptools/_vendor/tomli-2.0.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE b/setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE new file mode 100644 index 00000000..e859590f --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Taneli Hukkinen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/METADATA b/setuptools/_vendor/tomli-2.0.1.dist-info/METADATA new file mode 100644 index 00000000..efd87ecc --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/METADATA @@ -0,0 +1,206 @@ +Metadata-Version: 2.1 +Name: tomli +Version: 2.0.1 +Summary: A lil' TOML parser +Keywords: toml +Author-email: Taneli Hukkinen <hukkin@users.noreply.github.com> +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX :: Linux +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Typing :: Typed +Project-URL: Changelog, https://github.com/hukkin/tomli/blob/master/CHANGELOG.md +Project-URL: Homepage, https://github.com/hukkin/tomli + +[](https://github.com/hukkin/tomli/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush) +[](https://codecov.io/gh/hukkin/tomli) +[](https://pypi.org/project/tomli) + +# Tomli + +> A lil' TOML parser + +**Table of Contents** *generated with [mdformat-toc](https://github.com/hukkin/mdformat-toc)* + +<!-- mdformat-toc start --slug=github --maxlevel=6 --minlevel=2 --> + +- [Intro](#intro) +- [Installation](#installation) +- [Usage](#usage) + - [Parse a TOML string](#parse-a-toml-string) + - [Parse a TOML file](#parse-a-toml-file) + - [Handle invalid TOML](#handle-invalid-toml) + - [Construct `decimal.Decimal`s from TOML floats](#construct-decimaldecimals-from-toml-floats) +- [FAQ](#faq) + - [Why this parser?](#why-this-parser) + - [Is comment preserving round-trip parsing supported?](#is-comment-preserving-round-trip-parsing-supported) + - [Is there a `dumps`, `write` or `encode` function?](#is-there-a-dumps-write-or-encode-function) + - [How do TOML types map into Python types?](#how-do-toml-types-map-into-python-types) +- [Performance](#performance) + +<!-- mdformat-toc end --> + +## Intro<a name="intro"></a> + +Tomli is a Python library for parsing [TOML](https://toml.io). +Tomli is fully compatible with [TOML v1.0.0](https://toml.io/en/v1.0.0). + +## Installation<a name="installation"></a> + +```bash +pip install tomli +``` + +## Usage<a name="usage"></a> + +### Parse a TOML string<a name="parse-a-toml-string"></a> + +```python +import tomli + +toml_str = """ + gretzky = 99 + + [kurri] + jari = 17 + """ + +toml_dict = tomli.loads(toml_str) +assert toml_dict == {"gretzky": 99, "kurri": {"jari": 17}} +``` + +### Parse a TOML file<a name="parse-a-toml-file"></a> + +```python +import tomli + +with open("path_to_file/conf.toml", "rb") as f: + toml_dict = tomli.load(f) +``` + +The file must be opened in binary mode (with the `"rb"` flag). 
+Binary mode will enforce decoding the file as UTF-8 with universal newlines disabled, +both of which are required to correctly parse TOML. + +### Handle invalid TOML<a name="handle-invalid-toml"></a> + +```python +import tomli + +try: + toml_dict = tomli.loads("]] this is invalid TOML [[") +except tomli.TOMLDecodeError: + print("Yep, definitely not valid.") +``` + +Note that error messages are considered informational only. +They should not be assumed to stay constant across Tomli versions. + +### Construct `decimal.Decimal`s from TOML floats<a name="construct-decimaldecimals-from-toml-floats"></a> + +```python +from decimal import Decimal +import tomli + +toml_dict = tomli.loads("precision-matters = 0.982492", parse_float=Decimal) +assert toml_dict["precision-matters"] == Decimal("0.982492") +``` + +Note that `decimal.Decimal` can be replaced with another callable that converts a TOML float from string to a Python type. +The `decimal.Decimal` is, however, a practical choice for use cases where float inaccuracies can not be tolerated. + +Illegal types are `dict` and `list`, and their subtypes. +A `ValueError` will be raised if `parse_float` produces illegal types. + +## FAQ<a name="faq"></a> + +### Why this parser?<a name="why-this-parser"></a> + +- it's lil' +- pure Python with zero dependencies +- the fastest pure Python parser [\*](#performance): + 15x as fast as [tomlkit](https://pypi.org/project/tomlkit/), + 2.4x as fast as [toml](https://pypi.org/project/toml/) +- outputs [basic data types](#how-do-toml-types-map-into-python-types) only +- 100% spec compliant: passes all tests in + [a test set](https://github.com/toml-lang/compliance/pull/8) + soon to be merged to the official + [compliance tests for TOML](https://github.com/toml-lang/compliance) + repository +- thoroughly tested: 100% branch coverage + +### Is comment preserving round-trip parsing supported?<a name="is-comment-preserving-round-trip-parsing-supported"></a> + +No. + +The `tomli.loads` function returns a plain `dict` that is populated with builtin types and types from the standard library only. +Preserving comments requires a custom type to be returned so will not be supported, +at least not by the `tomli.loads` and `tomli.load` functions. + +Look into [TOML Kit](https://github.com/sdispater/tomlkit) if preservation of style is what you need. + +### Is there a `dumps`, `write` or `encode` function?<a name="is-there-a-dumps-write-or-encode-function"></a> + +[Tomli-W](https://github.com/hukkin/tomli-w) is the write-only counterpart of Tomli, providing `dump` and `dumps` functions. + +The core library does not include write capability, as most TOML use cases are read-only, and Tomli intends to be minimal. 
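A minimal round-trip sketch of the read/write split described above, assuming both `tomli` and its write-only counterpart `tomli-w` are installed (setuptools only vendors the reader):

```python
# Round-trip sketch: tomli parses, Tomli-W serializes.
# Assumes: pip install tomli tomli-w
import tomli
import tomli_w

data = tomli.loads("gretzky = 99")
text = tomli_w.dumps(data)        # the write-only counterpart provides dumps/dump
assert tomli.loads(text) == data  # parse -> dump -> parse is stable
```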
+ +### How do TOML types map into Python types?<a name="how-do-toml-types-map-into-python-types"></a> + +| TOML type | Python type | Details | +| ---------------- | ------------------- | ------------------------------------------------------------ | +| Document Root | `dict` | | +| Key | `str` | | +| String | `str` | | +| Integer | `int` | | +| Float | `float` | | +| Boolean | `bool` | | +| Offset Date-Time | `datetime.datetime` | `tzinfo` attribute set to an instance of `datetime.timezone` | +| Local Date-Time | `datetime.datetime` | `tzinfo` attribute set to `None` | +| Local Date | `datetime.date` | | +| Local Time | `datetime.time` | | +| Array | `list` | | +| Table | `dict` | | +| Inline Table | `dict` | | + +## Performance<a name="performance"></a> + +The `benchmark/` folder in this repository contains a performance benchmark for comparing the various Python TOML parsers. +The benchmark can be run with `tox -e benchmark-pypi`. +Running the benchmark on my personal computer output the following: + +```console +foo@bar:~/dev/tomli$ tox -e benchmark-pypi +benchmark-pypi installed: attrs==19.3.0,click==7.1.2,pytomlpp==1.0.2,qtoml==0.3.0,rtoml==0.7.0,toml==0.10.2,tomli==1.1.0,tomlkit==0.7.2 +benchmark-pypi run-test-pre: PYTHONHASHSEED='2658546909' +benchmark-pypi run-test: commands[0] | python -c 'import datetime; print(datetime.date.today())' +2021-07-23 +benchmark-pypi run-test: commands[1] | python --version +Python 3.8.10 +benchmark-pypi run-test: commands[2] | python benchmark/run.py +Parsing data.toml 5000 times: +------------------------------------------------------ + parser | exec time | performance (more is better) +-----------+------------+----------------------------- + rtoml | 0.901 s | baseline (100%) + pytomlpp | 1.08 s | 83.15% + tomli | 3.89 s | 23.15% + toml | 9.36 s | 9.63% + qtoml | 11.5 s | 7.82% + tomlkit | 56.8 s | 1.59% +``` + +The parsers are ordered from fastest to slowest, using the fastest parser as baseline. +Tomli performed the best out of all pure Python TOML parsers, +losing only to pytomlpp (wraps C++) and rtoml (wraps Rust). 
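The type-mapping table earlier in this README can be spot-checked directly; a small sketch using only `tomli` itself:

```python
# Spot-check the TOML -> Python type mapping documented above.
import datetime
import tomli

doc = tomli.loads("""
name = "setuptools"              # String  -> str
answer = 42                      # Integer -> int
pi = 3.14                        # Float   -> float
released = 1979-05-27T07:32:00Z  # Offset Date-Time -> aware datetime

[nested]                         # Table -> dict
items = [1, 2, 3]                # Array -> list
""")

assert isinstance(doc["released"], datetime.datetime)
assert doc["released"].tzinfo is not None     # offset date-times carry tzinfo
assert doc["nested"] == {"items": [1, 2, 3]}
```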
+ diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD b/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD new file mode 100644 index 00000000..2d93fa2c --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/RECORD @@ -0,0 +1,15 @@ +tomli-2.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +tomli-2.0.1.dist-info/LICENSE,sha256=uAgWsNUwuKzLTCIReDeQmEpuO2GSLCte6S8zcqsnQv4,1072 +tomli-2.0.1.dist-info/METADATA,sha256=zPDceKmPwJGLWtZykrHixL7WVXWmJGzZ1jyRT5lCoPI,8875 +tomli-2.0.1.dist-info/RECORD,, +tomli-2.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +tomli-2.0.1.dist-info/WHEEL,sha256=jPMR_Dzkc4X4icQtmz81lnNY_kAsfog7ry7qoRvYLXw,81 +tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396 +tomli/__pycache__/__init__.cpython-38.pyc,, +tomli/__pycache__/_parser.cpython-38.pyc,, +tomli/__pycache__/_re.cpython-38.pyc,, +tomli/__pycache__/_types.cpython-38.pyc,, +tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633 +tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943 +tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254 +tomli/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26 diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/REQUESTED b/setuptools/_vendor/tomli-2.0.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/REQUESTED diff --git a/setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL b/setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL new file mode 100644 index 00000000..c727d148 --- /dev/null +++ b/setuptools/_vendor/tomli-2.0.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.6.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/setuptools/_vendor/tomli/__init__.py b/setuptools/_vendor/tomli/__init__.py new file mode 100644 index 00000000..4c6ec97e --- /dev/null +++ b/setuptools/_vendor/tomli/__init__.py @@ -0,0 +1,11 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +__all__ = ("loads", "load", "TOMLDecodeError") +__version__ = "2.0.1" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT + +from ._parser import TOMLDecodeError, load, loads + +# Pretend this exception was created here. +TOMLDecodeError.__module__ = __name__ diff --git a/setuptools/_vendor/tomli/_parser.py b/setuptools/_vendor/tomli/_parser.py new file mode 100644 index 00000000..f1bb0aa1 --- /dev/null +++ b/setuptools/_vendor/tomli/_parser.py @@ -0,0 +1,691 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from __future__ import annotations + +from collections.abc import Iterable +import string +from types import MappingProxyType +from typing import Any, BinaryIO, NamedTuple + +from ._re import ( + RE_DATETIME, + RE_LOCALTIME, + RE_NUMBER, + match_to_datetime, + match_to_localtime, + match_to_number, +) +from ._types import Key, ParseFloat, Pos + +ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) + +# Neither of these sets include quotation mark or backslash. They are +# currently handled as separate cases in the parser functions. 
+ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t") +ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n") + +ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS +ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS + +ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS + +TOML_WS = frozenset(" \t") +TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n") +BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") +KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'") +HEXDIGIT_CHARS = frozenset(string.hexdigits) + +BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( + { + "\\b": "\u0008", # backspace + "\\t": "\u0009", # tab + "\\n": "\u000A", # linefeed + "\\f": "\u000C", # form feed + "\\r": "\u000D", # carriage return + '\\"': "\u0022", # quote + "\\\\": "\u005C", # backslash + } +) + + +class TOMLDecodeError(ValueError): + """An error raised if a document is not valid TOML.""" + + +def load(__fp: BinaryIO, *, parse_float: ParseFloat = float) -> dict[str, Any]: + """Parse TOML from a binary file object.""" + b = __fp.read() + try: + s = b.decode() + except AttributeError: + raise TypeError( + "File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`" + ) from None + return loads(s, parse_float=parse_float) + + +def loads(__s: str, *, parse_float: ParseFloat = float) -> dict[str, Any]: # noqa: C901 + """Parse TOML from a string.""" + + # The spec allows converting "\r\n" to "\n", even in string + # literals. Let's do so to simplify parsing. + src = __s.replace("\r\n", "\n") + pos = 0 + out = Output(NestedDict(), Flags()) + header: Key = () + parse_float = make_safe_parse_float(parse_float) + + # Parse one statement at a time + # (typically means one line in TOML source) + while True: + # 1. Skip line leading whitespace + pos = skip_chars(src, pos, TOML_WS) + + # 2. Parse rules. Expect one of the following: + # - end of file + # - end of line + # - comment + # - key/value pair + # - append dict to list (and move to its namespace) + # - create dict (and move to its namespace) + # Skip trailing whitespace when applicable. + try: + char = src[pos] + except IndexError: + break + if char == "\n": + pos += 1 + continue + if char in KEY_INITIAL_CHARS: + pos = key_value_rule(src, pos, out, header, parse_float) + pos = skip_chars(src, pos, TOML_WS) + elif char == "[": + try: + second_char: str | None = src[pos + 1] + except IndexError: + second_char = None + out.flags.finalize_pending() + if second_char == "[": + pos, header = create_list_rule(src, pos, out) + else: + pos, header = create_dict_rule(src, pos, out) + pos = skip_chars(src, pos, TOML_WS) + elif char != "#": + raise suffixed_err(src, pos, "Invalid statement") + + # 3. Skip comment + pos = skip_comment(src, pos) + + # 4. Expect end of line or end of file + try: + char = src[pos] + except IndexError: + break + if char != "\n": + raise suffixed_err( + src, pos, "Expected newline or end of document after a statement" + ) + pos += 1 + + return out.data.dict + + +class Flags: + """Flags that map to parsed keys/namespaces.""" + + # Marks an immutable namespace (inline array or inline table). + FROZEN = 0 + # Marks a nest that has been explicitly created and can no longer + # be opened using the "[table]" syntax. 
+ EXPLICIT_NEST = 1 + + def __init__(self) -> None: + self._flags: dict[str, dict] = {} + self._pending_flags: set[tuple[Key, int]] = set() + + def add_pending(self, key: Key, flag: int) -> None: + self._pending_flags.add((key, flag)) + + def finalize_pending(self) -> None: + for key, flag in self._pending_flags: + self.set(key, flag, recursive=False) + self._pending_flags.clear() + + def unset_all(self, key: Key) -> None: + cont = self._flags + for k in key[:-1]: + if k not in cont: + return + cont = cont[k]["nested"] + cont.pop(key[-1], None) + + def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003 + cont = self._flags + key_parent, key_stem = key[:-1], key[-1] + for k in key_parent: + if k not in cont: + cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + if key_stem not in cont: + cont[key_stem] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont[key_stem]["recursive_flags" if recursive else "flags"].add(flag) + + def is_(self, key: Key, flag: int) -> bool: + if not key: + return False # document root has no flags + cont = self._flags + for k in key[:-1]: + if k not in cont: + return False + inner_cont = cont[k] + if flag in inner_cont["recursive_flags"]: + return True + cont = inner_cont["nested"] + key_stem = key[-1] + if key_stem in cont: + cont = cont[key_stem] + return flag in cont["flags"] or flag in cont["recursive_flags"] + return False + + +class NestedDict: + def __init__(self) -> None: + # The parsed content of the TOML document + self.dict: dict[str, Any] = {} + + def get_or_create_nest( + self, + key: Key, + *, + access_lists: bool = True, + ) -> dict: + cont: Any = self.dict + for k in key: + if k not in cont: + cont[k] = {} + cont = cont[k] + if access_lists and isinstance(cont, list): + cont = cont[-1] + if not isinstance(cont, dict): + raise KeyError("There is no nest behind this key") + return cont + + def append_nest_to_list(self, key: Key) -> None: + cont = self.get_or_create_nest(key[:-1]) + last_key = key[-1] + if last_key in cont: + list_ = cont[last_key] + if not isinstance(list_, list): + raise KeyError("An object other than list found behind this key") + list_.append({}) + else: + cont[last_key] = [{}] + + +class Output(NamedTuple): + data: NestedDict + flags: Flags + + +def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos: + try: + while src[pos] in chars: + pos += 1 + except IndexError: + pass + return pos + + +def skip_until( + src: str, + pos: Pos, + expect: str, + *, + error_on: frozenset[str], + error_on_eof: bool, +) -> Pos: + try: + new_pos = src.index(expect, pos) + except ValueError: + new_pos = len(src) + if error_on_eof: + raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None + + if not error_on.isdisjoint(src[pos:new_pos]): + while src[pos] not in error_on: + pos += 1 + raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}") + return new_pos + + +def skip_comment(src: str, pos: Pos) -> Pos: + try: + char: str | None = src[pos] + except IndexError: + char = None + if char == "#": + return skip_until( + src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False + ) + return pos + + +def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos: + while True: + pos_before_skip = pos + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + pos = skip_comment(src, pos) + if pos == pos_before_skip: + return pos + + +def create_dict_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: + pos += 1 # Skip "[" + pos = 
skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot declare {key} twice") + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + out.data.get_or_create_nest(key) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + + if not src.startswith("]", pos): + raise suffixed_err(src, pos, "Expected ']' at the end of a table declaration") + return pos + 1, key + + +def create_list_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: + pos += 2 # Skip "[[" + pos = skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") + # Free the namespace now that it points to another empty list item... + out.flags.unset_all(key) + # ...but this key precisely is still prohibited from table declaration + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + out.data.append_nest_to_list(key) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + + if not src.startswith("]]", pos): + raise suffixed_err(src, pos, "Expected ']]' at the end of an array declaration") + return pos + 2, key + + +def key_value_rule( + src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat +) -> Pos: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + abs_key_parent = header + key_parent + + relative_path_cont_keys = (header + key[:i] for i in range(1, len(key))) + for cont_key in relative_path_cont_keys: + # Check that dotted key syntax does not redefine an existing table + if out.flags.is_(cont_key, Flags.EXPLICIT_NEST): + raise suffixed_err(src, pos, f"Cannot redefine namespace {cont_key}") + # Containers in the relative path can't be opened with the table syntax or + # dotted key/value syntax in following table sections. 
+ out.flags.add_pending(cont_key, Flags.EXPLICIT_NEST) + + if out.flags.is_(abs_key_parent, Flags.FROZEN): + raise suffixed_err( + src, pos, f"Cannot mutate immutable namespace {abs_key_parent}" + ) + + try: + nest = out.data.get_or_create_nest(abs_key_parent) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + if key_stem in nest: + raise suffixed_err(src, pos, "Cannot overwrite a value") + # Mark inline table and array namespaces recursively immutable + if isinstance(value, (dict, list)): + out.flags.set(header + key, Flags.FROZEN, recursive=True) + nest[key_stem] = value + return pos + + +def parse_key_value_pair( + src: str, pos: Pos, parse_float: ParseFloat +) -> tuple[Pos, Key, Any]: + pos, key = parse_key(src, pos) + try: + char: str | None = src[pos] + except IndexError: + char = None + if char != "=": + raise suffixed_err(src, pos, "Expected '=' after a key in a key/value pair") + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, value = parse_value(src, pos, parse_float) + return pos, key, value + + +def parse_key(src: str, pos: Pos) -> tuple[Pos, Key]: + pos, key_part = parse_key_part(src, pos) + key: Key = (key_part,) + pos = skip_chars(src, pos, TOML_WS) + while True: + try: + char: str | None = src[pos] + except IndexError: + char = None + if char != ".": + return pos, key + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, key_part = parse_key_part(src, pos) + key += (key_part,) + pos = skip_chars(src, pos, TOML_WS) + + +def parse_key_part(src: str, pos: Pos) -> tuple[Pos, str]: + try: + char: str | None = src[pos] + except IndexError: + char = None + if char in BARE_KEY_CHARS: + start_pos = pos + pos = skip_chars(src, pos, BARE_KEY_CHARS) + return pos, src[start_pos:pos] + if char == "'": + return parse_literal_str(src, pos) + if char == '"': + return parse_one_line_basic_str(src, pos) + raise suffixed_err(src, pos, "Invalid initial character for a key part") + + +def parse_one_line_basic_str(src: str, pos: Pos) -> tuple[Pos, str]: + pos += 1 + return parse_basic_str(src, pos, multiline=False) + + +def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, list]: + pos += 1 + array: list = [] + + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("]", pos): + return pos + 1, array + while True: + pos, val = parse_value(src, pos, parse_float) + array.append(val) + pos = skip_comments_and_array_ws(src, pos) + + c = src[pos : pos + 1] + if c == "]": + return pos + 1, array + if c != ",": + raise suffixed_err(src, pos, "Unclosed array") + pos += 1 + + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("]", pos): + return pos + 1, array + + +def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, dict]: + pos += 1 + nested_dict = NestedDict() + flags = Flags() + + pos = skip_chars(src, pos, TOML_WS) + if src.startswith("}", pos): + return pos + 1, nested_dict.dict + while True: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + if flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") + try: + nest = nested_dict.get_or_create_nest(key_parent, access_lists=False) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + if key_stem in nest: + raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}") + nest[key_stem] = value + pos = skip_chars(src, pos, TOML_WS) + c = src[pos : pos + 1] + if c == "}": + return pos + 1, 
nested_dict.dict + if c != ",": + raise suffixed_err(src, pos, "Unclosed inline table") + if isinstance(value, (dict, list)): + flags.set(key, Flags.FROZEN, recursive=True) + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + + +def parse_basic_str_escape( + src: str, pos: Pos, *, multiline: bool = False +) -> tuple[Pos, str]: + escape_id = src[pos : pos + 2] + pos += 2 + if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}: + # Skip whitespace until next non-whitespace character or end of + # the doc. Error if non-whitespace is found before newline. + if escape_id != "\\\n": + pos = skip_chars(src, pos, TOML_WS) + try: + char = src[pos] + except IndexError: + return pos, "" + if char != "\n": + raise suffixed_err(src, pos, "Unescaped '\\' in a string") + pos += 1 + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + return pos, "" + if escape_id == "\\u": + return parse_hex_char(src, pos, 4) + if escape_id == "\\U": + return parse_hex_char(src, pos, 8) + try: + return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] + except KeyError: + raise suffixed_err(src, pos, "Unescaped '\\' in a string") from None + + +def parse_basic_str_escape_multiline(src: str, pos: Pos) -> tuple[Pos, str]: + return parse_basic_str_escape(src, pos, multiline=True) + + +def parse_hex_char(src: str, pos: Pos, hex_len: int) -> tuple[Pos, str]: + hex_str = src[pos : pos + hex_len] + if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str): + raise suffixed_err(src, pos, "Invalid hex value") + pos += hex_len + hex_int = int(hex_str, 16) + if not is_unicode_scalar_value(hex_int): + raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value") + return pos, chr(hex_int) + + +def parse_literal_str(src: str, pos: Pos) -> tuple[Pos, str]: + pos += 1 # Skip starting apostrophe + start_pos = pos + pos = skip_until( + src, pos, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True + ) + return pos + 1, src[start_pos:pos] # Skip ending apostrophe + + +def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> tuple[Pos, str]: + pos += 3 + if src.startswith("\n", pos): + pos += 1 + + if literal: + delim = "'" + end_pos = skip_until( + src, + pos, + "'''", + error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS, + error_on_eof=True, + ) + result = src[pos:end_pos] + pos = end_pos + 3 + else: + delim = '"' + pos, result = parse_basic_str(src, pos, multiline=True) + + # Add at maximum two extra apostrophes/quotes if the end sequence + # is 4 or 5 chars long instead of just 3. 
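+    # For example, '"""x""""' parses to 'x"' and '"""x"""""' parses to 'x""'
+    # (the closing delimiter absorbs only the last three quotes).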
+ if not src.startswith(delim, pos): + return pos, result + pos += 1 + if not src.startswith(delim, pos): + return pos, result + delim + pos += 1 + return pos, result + (delim * 2) + + +def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> tuple[Pos, str]: + if multiline: + error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape_multiline + else: + error_on = ILLEGAL_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape + result = "" + start_pos = pos + while True: + try: + char = src[pos] + except IndexError: + raise suffixed_err(src, pos, "Unterminated string") from None + if char == '"': + if not multiline: + return pos + 1, result + src[start_pos:pos] + if src.startswith('"""', pos): + return pos + 3, result + src[start_pos:pos] + pos += 1 + continue + if char == "\\": + result += src[start_pos:pos] + pos, parsed_escape = parse_escapes(src, pos) + result += parsed_escape + start_pos = pos + continue + if char in error_on: + raise suffixed_err(src, pos, f"Illegal character {char!r}") + pos += 1 + + +def parse_value( # noqa: C901 + src: str, pos: Pos, parse_float: ParseFloat +) -> tuple[Pos, Any]: + try: + char: str | None = src[pos] + except IndexError: + char = None + + # IMPORTANT: order conditions based on speed of checking and likelihood + + # Basic strings + if char == '"': + if src.startswith('"""', pos): + return parse_multiline_str(src, pos, literal=False) + return parse_one_line_basic_str(src, pos) + + # Literal strings + if char == "'": + if src.startswith("'''", pos): + return parse_multiline_str(src, pos, literal=True) + return parse_literal_str(src, pos) + + # Booleans + if char == "t": + if src.startswith("true", pos): + return pos + 4, True + if char == "f": + if src.startswith("false", pos): + return pos + 5, False + + # Arrays + if char == "[": + return parse_array(src, pos, parse_float) + + # Inline tables + if char == "{": + return parse_inline_table(src, pos, parse_float) + + # Dates and times + datetime_match = RE_DATETIME.match(src, pos) + if datetime_match: + try: + datetime_obj = match_to_datetime(datetime_match) + except ValueError as e: + raise suffixed_err(src, pos, "Invalid date or datetime") from e + return datetime_match.end(), datetime_obj + localtime_match = RE_LOCALTIME.match(src, pos) + if localtime_match: + return localtime_match.end(), match_to_localtime(localtime_match) + + # Integers and "normal" floats. + # The regex will greedily match any type starting with a decimal + # char, so needs to be located after handling of dates and times. 
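+    # For example, in "1979-05-27" RE_NUMBER would happily match the leading
+    # "1979", so RE_DATETIME/RE_LOCALTIME above must be given the first try.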
+ number_match = RE_NUMBER.match(src, pos) + if number_match: + return number_match.end(), match_to_number(number_match, parse_float) + + # Special floats + first_three = src[pos : pos + 3] + if first_three in {"inf", "nan"}: + return pos + 3, parse_float(first_three) + first_four = src[pos : pos + 4] + if first_four in {"-inf", "+inf", "-nan", "+nan"}: + return pos + 4, parse_float(first_four) + + raise suffixed_err(src, pos, "Invalid value") + + +def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError: + """Return a `TOMLDecodeError` where error message is suffixed with + coordinates in source.""" + + def coord_repr(src: str, pos: Pos) -> str: + if pos >= len(src): + return "end of document" + line = src.count("\n", 0, pos) + 1 + if line == 1: + column = pos + 1 + else: + column = pos - src.rindex("\n", 0, pos) + return f"line {line}, column {column}" + + return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})") + + +def is_unicode_scalar_value(codepoint: int) -> bool: + return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111) + + +def make_safe_parse_float(parse_float: ParseFloat) -> ParseFloat: + """A decorator to make `parse_float` safe. + + `parse_float` must not return dicts or lists, because these types + would be mixed with parsed TOML tables and arrays, thus confusing + the parser. The returned decorated callable raises `ValueError` + instead of returning illegal types. + """ + # The default `float` callable never returns illegal types. Optimize it. + if parse_float is float: # type: ignore[comparison-overlap] + return float + + def safe_parse_float(float_str: str) -> Any: + float_value = parse_float(float_str) + if isinstance(float_value, (dict, list)): + raise ValueError("parse_float must not return dicts or lists") + return float_value + + return safe_parse_float diff --git a/setuptools/_vendor/tomli/_re.py b/setuptools/_vendor/tomli/_re.py new file mode 100644 index 00000000..994bb749 --- /dev/null +++ b/setuptools/_vendor/tomli/_re.py @@ -0,0 +1,107 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from __future__ import annotations + +from datetime import date, datetime, time, timedelta, timezone, tzinfo +from functools import lru_cache +import re +from typing import Any + +from ._types import ParseFloat + +# E.g. +# - 00:32:00.999999 +# - 00:32:00 +_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?" + +RE_NUMBER = re.compile( + r""" +0 +(?: + x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex + | + b[01](?:_?[01])* # bin + | + o[0-7](?:_?[0-7])* # oct +) +| +[+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part +(?P<floatpart> + (?:\.[0-9](?:_?[0-9])*)? # optional fractional part + (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part +) +""", + flags=re.VERBOSE, +) +RE_LOCALTIME = re.compile(_TIME_RE_STR) +RE_DATETIME = re.compile( + rf""" +([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27 +(?: + [Tt ] + {_TIME_RE_STR} + (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset +)? +""", + flags=re.VERBOSE, +) + + +def match_to_datetime(match: re.Match) -> datetime | date: + """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`. + + Raises ValueError if the match does not correspond to a valid date + or datetime. 
+ """ + ( + year_str, + month_str, + day_str, + hour_str, + minute_str, + sec_str, + micros_str, + zulu_time, + offset_sign_str, + offset_hour_str, + offset_minute_str, + ) = match.groups() + year, month, day = int(year_str), int(month_str), int(day_str) + if hour_str is None: + return date(year, month, day) + hour, minute, sec = int(hour_str), int(minute_str), int(sec_str) + micros = int(micros_str.ljust(6, "0")) if micros_str else 0 + if offset_sign_str: + tz: tzinfo | None = cached_tz( + offset_hour_str, offset_minute_str, offset_sign_str + ) + elif zulu_time: + tz = timezone.utc + else: # local date-time + tz = None + return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz) + + +@lru_cache(maxsize=None) +def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone: + sign = 1 if sign_str == "+" else -1 + return timezone( + timedelta( + hours=sign * int(hour_str), + minutes=sign * int(minute_str), + ) + ) + + +def match_to_localtime(match: re.Match) -> time: + hour_str, minute_str, sec_str, micros_str = match.groups() + micros = int(micros_str.ljust(6, "0")) if micros_str else 0 + return time(int(hour_str), int(minute_str), int(sec_str), micros) + + +def match_to_number(match: re.Match, parse_float: ParseFloat) -> Any: + if match.group("floatpart"): + return parse_float(match.group()) + return int(match.group(), 0) diff --git a/setuptools/_vendor/tomli/_types.py b/setuptools/_vendor/tomli/_types.py new file mode 100644 index 00000000..d949412e --- /dev/null +++ b/setuptools/_vendor/tomli/_types.py @@ -0,0 +1,10 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from typing import Any, Callable, Tuple + +# Type annotations +ParseFloat = Callable[[str], Any] +Key = Tuple[str, ...] +Pos = int diff --git a/setuptools/_vendor/tomli/py.typed b/setuptools/_vendor/tomli/py.typed new file mode 100644 index 00000000..7632ecf7 --- /dev/null +++ b/setuptools/_vendor/tomli/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 diff --git a/setuptools/_vendor/vendored.txt b/setuptools/_vendor/vendored.txt index db24b402..b08b0d6f 100644 --- a/setuptools/_vendor/vendored.txt +++ b/setuptools/_vendor/vendored.txt @@ -5,7 +5,9 @@ more_itertools==8.8.0 jaraco.text==3.7.0 importlib_resources==5.4.0 importlib_metadata==4.11.1 +nspektr==0.3.0 # required for importlib_metadata on older Pythons typing_extensions==4.0.1 # required for importlib_resources and _metadata on older Pythons zipp==3.7.0 +tomli==2.0.1 diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index 940c916f..444d3b33 100644 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -292,7 +292,9 @@ class easy_install(Command): if not self.editable: self.check_site_dir() - self.index_url = self.index_url or "https://pypi.org/simple/" + default_index = os.getenv("__EASYINSTALL_INDEX", "https://pypi.org/simple/") + # ^ Private API for testing purposes only + self.index_url = self.index_url or default_index self.shadow_path = self.all_site_dirs[:] for path_item in self.install_dir, normalize_path(self.script_dir): if path_item not in self.shadow_path: diff --git a/setuptools/command/install.py b/setuptools/command/install.py index 35e54d20..55fdb124 100644 --- a/setuptools/command/install.py +++ b/setuptools/command/install.py @@ -91,14 +91,21 @@ class install(orig.install): msg = "For best results, pass -X:Frames to enable call stack." 
warnings.warn(msg) return True - res = inspect.getouterframes(run_frame)[2] - caller, = res[:1] - info = inspect.getframeinfo(caller) - caller_module = caller.f_globals.get('__name__', '') - return ( - caller_module == 'distutils.dist' - and info.function == 'run_commands' - ) + + frames = inspect.getouterframes(run_frame) + for frame in frames[2:4]: + caller, = frame[:1] + info = inspect.getframeinfo(caller) + caller_module = caller.f_globals.get('__name__', '') + + if caller_module == "setuptools.dist" and info.function == "run_command": + # Starting from v61.0.0 setuptools overwrites dist.run_command + continue + + return ( + caller_module == 'distutils.dist' + and info.function == 'run_commands' + ) def do_egg_install(self): diff --git a/setuptools/config/__init__.py b/setuptools/config/__init__.py new file mode 100644 index 00000000..35458d8e --- /dev/null +++ b/setuptools/config/__init__.py @@ -0,0 +1,35 @@ +"""For backward compatibility, expose main functions from +``setuptools.config.setupcfg`` +""" +import warnings +from functools import wraps +from textwrap import dedent +from typing import Callable, TypeVar, cast + +from .._deprecation_warning import SetuptoolsDeprecationWarning +from . import setupcfg + +Fn = TypeVar("Fn", bound=Callable) + +__all__ = ('parse_configuration', 'read_configuration') + + +def _deprecation_notice(fn: Fn) -> Fn: + @wraps(fn) + def _wrapper(*args, **kwargs): + msg = f"""\ + As setuptools moves its configuration towards `pyproject.toml`, + `{__name__}.{fn.__name__}` became deprecated. + + For the time being, you can use the `{setupcfg.__name__}` module + to access a backward compatible API, but this module is provisional + and might be removed in the future. + """ + warnings.warn(dedent(msg), SetuptoolsDeprecationWarning) + return fn(*args, **kwargs) + + return cast(Fn, _wrapper) + + +read_configuration = _deprecation_notice(setupcfg.read_configuration) +parse_configuration = _deprecation_notice(setupcfg.parse_configuration) diff --git a/setuptools/config/_apply_pyprojecttoml.py b/setuptools/config/_apply_pyprojecttoml.py new file mode 100644 index 00000000..fce5c40e --- /dev/null +++ b/setuptools/config/_apply_pyprojecttoml.py @@ -0,0 +1,374 @@ +"""Translation layer between pyproject config and setuptools distribution and +metadata objects. + +The distribution and metadata objects are modeled after (an old version of) +core metadata, therefore configs in the format specified for ``pyproject.toml`` +need to be processed before being applied. +""" +import logging +import os +import warnings +from collections.abc import Mapping +from email.headerregistry import Address +from functools import partial, reduce +from itertools import chain +from types import MappingProxyType +from typing import (TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple, + Type, Union) + +if TYPE_CHECKING: + from setuptools._importlib import metadata # noqa + from setuptools.dist import Distribution # noqa + +EMPTY: Mapping = MappingProxyType({}) # Immutable dict-like +_Path = Union[os.PathLike, str] +_DictOrStr = Union[dict, str] +_CorrespFn = Callable[["Distribution", Any, _Path], None] +_Correspondence = Union[str, _CorrespFn] + +_logger = logging.getLogger(__name__) + + +def apply(dist: "Distribution", config: dict, filename: _Path) -> "Distribution": + """Apply configuration dict read with :func:`read_configuration`""" + + if not config: + return dist # short-circuit unrelated pyproject.toml file + + root_dir = os.path.dirname(filename) or "." 
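+    # ^ Note: ``os.path.dirname`` returns "" when ``filename`` is a bare name
+    #   such as "pyproject.toml", hence the fallback to the current directory.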
+ + _apply_project_table(dist, config, root_dir) + _apply_tool_table(dist, config, filename) + + current_directory = os.getcwd() + os.chdir(root_dir) + try: + dist._finalize_requires() + dist._finalize_license_files() + finally: + os.chdir(current_directory) + + return dist + + +def _apply_project_table(dist: "Distribution", config: dict, root_dir: _Path): + project_table = config.get("project", {}).copy() + if not project_table: + return # short-circuit + + _handle_missing_dynamic(dist, project_table) + _unify_entry_points(project_table) + + for field, value in project_table.items(): + norm_key = json_compatible_key(field) + corresp = PYPROJECT_CORRESPONDENCE.get(norm_key, norm_key) + if callable(corresp): + corresp(dist, value, root_dir) + else: + _set_config(dist, corresp, value) + + +def _apply_tool_table(dist: "Distribution", config: dict, filename: _Path): + tool_table = config.get("tool", {}).get("setuptools", {}) + if not tool_table: + return # short-circuit + + for field, value in tool_table.items(): + norm_key = json_compatible_key(field) + norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key) + _set_config(dist, norm_key, value) + + _copy_command_options(config, dist, filename) + + +def _handle_missing_dynamic(dist: "Distribution", project_table: dict): + """Be temporarily forgiving with ``dynamic`` fields not listed in ``dynamic``""" + # TODO: Set fields back to `None` once the feature stabilizes + dynamic = set(project_table.get("dynamic", [])) + for field, getter in _PREVIOUSLY_DEFINED.items(): + if not (field in project_table or field in dynamic): + value = getter(dist) + if value: + msg = _WouldIgnoreField.message(field, value) + warnings.warn(msg, _WouldIgnoreField) + + +def json_compatible_key(key: str) -> str: + """As defined in :pep:`566#json-compatible-metadata`""" + return key.lower().replace("-", "_") + + +def _set_config(dist: "Distribution", field: str, value: Any): + setter = getattr(dist.metadata, f"set_{field}", None) + if setter: + setter(value) + elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES: + setattr(dist.metadata, field, value) + else: + setattr(dist, field, value) + + +_CONTENT_TYPES = { + ".md": "text/markdown", + ".rst": "text/x-rst", + ".txt": "text/plain", +} + + +def _guess_content_type(file: str) -> Optional[str]: + _, ext = os.path.splitext(file.lower()) + if not ext: + return None + + if ext in _CONTENT_TYPES: + return _CONTENT_TYPES[ext] + + valid = ", ".join(f"{k} ({v})" for k, v in _CONTENT_TYPES.items()) + msg = f"only the following file extensions are recognized: {valid}." 
+ raise ValueError(f"Undefined content type for {file}, {msg}") + + +def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: _Path): + from setuptools.config import expand + + if isinstance(val, str): + text = expand.read_files(val, root_dir) + ctype = _guess_content_type(val) + else: + text = val.get("text") or expand.read_files(val.get("file", []), root_dir) + ctype = val["content-type"] + + _set_config(dist, "long_description", text) + if ctype: + _set_config(dist, "long_description_content_type", ctype) + + +def _license(dist: "Distribution", val: dict, root_dir: _Path): + from setuptools.config import expand + + if "file" in val: + _set_config(dist, "license", expand.read_files([val["file"]], root_dir)) + else: + _set_config(dist, "license", val["text"]) + + +def _people(dist: "Distribution", val: List[dict], _root_dir: _Path, kind: str): + field = [] + email_field = [] + for person in val: + if "name" not in person: + email_field.append(person["email"]) + elif "email" not in person: + field.append(person["name"]) + else: + addr = Address(display_name=person["name"], addr_spec=person["email"]) + email_field.append(str(addr)) + + if field: + _set_config(dist, kind, ", ".join(field)) + if email_field: + _set_config(dist, f"{kind}_email", ", ".join(email_field)) + + +def _project_urls(dist: "Distribution", val: dict, _root_dir): + special = {"downloadurl": "download_url", "homepage": "url"} + for key, url in val.items(): + norm_key = json_compatible_key(key).replace("_", "") + _set_config(dist, special.get(norm_key, key), url) + # If `homepage` is missing, distutils will warn the following message: + # "warning: check: missing required meta-data: url" + # In the context of PEP 621, users might ask themselves: "which url?". + # Let's add a warning before distutils check to help users understand the problem: + if not dist.metadata.url: + msg = ( + "Missing `Homepage` url.\nIt is advisable to link some kind of reference " + "for your project (e.g. 
source code or documentation).\n"
+        )
+        _logger.warning(msg)
+    _set_config(dist, "project_urls", val.copy())
+
+
+def _python_requires(dist: "Distribution", val: dict, _root_dir):
+    from setuptools.extern.packaging.specifiers import SpecifierSet
+
+    _set_config(dist, "python_requires", SpecifierSet(val))
+
+
+def _dependencies(dist: "Distribution", val: list, _root_dir):
+    existing = getattr(dist, "install_requires", [])
+    _set_config(dist, "install_requires", existing + val)
+
+
+def _optional_dependencies(dist: "Distribution", val: dict, _root_dir):
+    existing = getattr(dist, "extras_require", {})
+    _set_config(dist, "extras_require", {**existing, **val})
+
+
+def _unify_entry_points(project_table: dict):
+    project = project_table
+    entry_points = project.pop("entry-points", project.pop("entry_points", {}))
+    renaming = {"scripts": "console_scripts", "gui_scripts": "gui_scripts"}
+    for key, value in list(project.items()):  # iterate over a copy: items may be popped
+        norm_key = json_compatible_key(key)
+        if norm_key in renaming and value:
+            entry_points[renaming[norm_key]] = project.pop(key)
+
+    if entry_points:
+        project["entry-points"] = {
+            name: [f"{k} = {v}" for k, v in group.items()]
+            for name, group in entry_points.items()
+        }
+
+
+def _copy_command_options(pyproject: dict, dist: "Distribution", filename: _Path):
+    tool_table = pyproject.get("tool", {})
+    cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {})
+    valid_options = _valid_command_options(cmdclass)
+
+    cmd_opts = dist.command_options
+    for cmd, config in pyproject.get("tool", {}).get("distutils", {}).items():
+        cmd = json_compatible_key(cmd)
+        valid = valid_options.get(cmd, set())
+        cmd_opts.setdefault(cmd, {})
+        for key, value in config.items():
+            key = json_compatible_key(key)
+            cmd_opts[cmd][key] = (str(filename), value)
+            if key not in valid:
+                # To avoid removing options that are specified dynamically
+                # (e.g. created in a plugin's ``finalize_options``), we just
+                # log a warning.
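+                # The invalid option is still stored above, so a misspelled
+                # key degrades to a log entry instead of a hard error.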
+ _logger.warning(f"Command option {cmd}.{key} is not defined") + + +def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]: + from .._importlib import metadata + from setuptools.dist import Distribution + + valid_options = {"global": _normalise_cmd_options(Distribution.global_options)} + + unloaded_entry_points = metadata.entry_points(group='distutils.commands') + loaded_entry_points = (_load_ep(ep) for ep in unloaded_entry_points) + entry_points = (ep for ep in loaded_entry_points if ep) + for cmd, cmd_class in chain(entry_points, cmdclass.items()): + opts = valid_options.get(cmd, set()) + opts = opts | _normalise_cmd_options(getattr(cmd_class, "user_options", [])) + valid_options[cmd] = opts + + return valid_options + + +def _load_ep(ep: "metadata.EntryPoint") -> Optional[Tuple[str, Type]]: + # Ignore all the errors + try: + return (ep.name, ep.load()) + except Exception as ex: + msg = f"{ex.__class__.__name__} while trying to load entry-point {ep.name}" + _logger.warning(f"{msg}: {ex}") + return None + + +def _normalise_cmd_option_key(name: str) -> str: + return json_compatible_key(name).strip("_=") + + +def _normalise_cmd_options(desc: List[Tuple[str, Optional[str], str]]) -> Set[str]: + return {_normalise_cmd_option_key(fancy_option[0]) for fancy_option in desc} + + +def _attrgetter(attr): + """ + Similar to ``operator.attrgetter`` but returns None if ``attr`` is not found + >>> from types import SimpleNamespace + >>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13)) + >>> _attrgetter("a")(obj) + 42 + >>> _attrgetter("b.c")(obj) + 13 + >>> _attrgetter("d")(obj) is None + True + """ + return partial(reduce, lambda acc, x: getattr(acc, x, None), attr.split(".")) + + +def _some_attrgetter(*items): + """ + Return the first "truth-y" attribute or None + >>> from types import SimpleNamespace + >>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13)) + >>> _some_attrgetter("d", "a", "b.c")(obj) + 42 + >>> _some_attrgetter("d", "e", "b.c", "a")(obj) + 13 + >>> _some_attrgetter("d", "e", "f")(obj) is None + True + """ + def _acessor(obj): + values = (_attrgetter(i)(obj) for i in items) + return next((i for i in values if i is not None), None) + return _acessor + + +PYPROJECT_CORRESPONDENCE: Dict[str, _Correspondence] = { + "readme": _long_description, + "license": _license, + "authors": partial(_people, kind="author"), + "maintainers": partial(_people, kind="maintainer"), + "urls": _project_urls, + "dependencies": _dependencies, + "optional_dependencies": _optional_dependencies, + "requires_python": _python_requires, +} + +TOOL_TABLE_RENAMES = {"script_files": "scripts"} + +SETUPTOOLS_PATCHES = {"long_description_content_type", "project_urls", + "provides_extras", "license_file", "license_files"} + +_PREVIOUSLY_DEFINED = { + "name": _attrgetter("metadata.name"), + "version": _attrgetter("metadata.version"), + "description": _attrgetter("metadata.description"), + "readme": _attrgetter("metadata.long_description"), + "requires-python": _some_attrgetter("python_requires", "metadata.python_requires"), + "license": _attrgetter("metadata.license"), + "authors": _some_attrgetter("metadata.author", "metadata.author_email"), + "maintainers": _some_attrgetter("metadata.maintainer", "metadata.maintainer_email"), + "keywords": _attrgetter("metadata.keywords"), + "classifiers": _attrgetter("metadata.classifiers"), + "urls": _attrgetter("metadata.project_urls"), + "entry-points": _attrgetter("entry_points"), + "dependencies": _some_attrgetter("_orig_install_requires", 
"install_requires"), + "optional-dependencies": _some_attrgetter("_orig_extras_require", "extras_require"), +} + + +class _WouldIgnoreField(UserWarning): + """Inform users that ``pyproject.toml`` would overwrite previously defined metadata: + !!\n\n + ########################################################################## + # configuration would be ignored/result in error due to `pyproject.toml` # + ########################################################################## + + The following seems to be defined outside of `pyproject.toml`: + + `{field} = {value!r}` + + According to the spec (see the link bellow), however, setuptools CANNOT + consider this value unless {field!r} is listed as `dynamic`. + + https://packaging.python.org/en/latest/specifications/declaring-project-metadata/ + + For the time being, `setuptools` will still consider the given value (as a + **transitional** measure), but please note that future releases of setuptools will + follow strictly the standard. + + To prevent this warning, you can list {field!r} under `dynamic` or alternatively + remove the `[project]` table from your file and rely entirely on other means of + configuration. + \n\n!! + """ + + @classmethod + def message(cls, field, value): + from inspect import cleandoc + msg = "\n".join(cls.__doc__.splitlines()[1:]) + return cleandoc(msg.format(field=field, value=value)) diff --git a/setuptools/config/_validate_pyproject/NOTICE b/setuptools/config/_validate_pyproject/NOTICE new file mode 100644 index 00000000..286d2908 --- /dev/null +++ b/setuptools/config/_validate_pyproject/NOTICE @@ -0,0 +1,439 @@ +The code contained in this directory was automatically generated using the +following command: + + python -m validate_pyproject.pre_compile --output-dir=setuptools/config/_validate_pyproject --enable-plugins setuptools distutils --very-verbose + +Please avoid changing it manually. + + +You can report issues or suggest changes directly to `validate-pyproject` +(or to the relevant plugin repository) + +- https://github.com/abravalheri/validate-pyproject/issues + + +*** + +The following files include code from opensource projects +(either as direct copies or modified versions): + +- `fastjsonschema_exceptions.py`: + - project: `fastjsonschema` - licensed under BSD-3-Clause + (https://github.com/horejsek/python-fastjsonschema) +- `extra_validations.py` and `format.py`, `error_reporting.py`: + - project: `validate-pyproject` - licensed under MPL-2.0 + (https://github.com/abravalheri/validate-pyproject) + + +Additionally the following files are automatically generated by tools provided +by the same projects: + +- `__init__.py` +- `fastjsonschema_validations.py` + +The relevant copyright notes and licenses are included below. + + +*** + +`fastjsonschema` +================ + +Copyright (c) 2018, Michal Horejsek +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + + Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +*** + +`validate-pyproject` +==================== + +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. "Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a. any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. 
For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. 
Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. 
However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts + of a jurisdiction where the defendant maintains its principal place of + business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. Nothing + in this Section shall prevent a party's ability to bring cross-claims or + counter-claims. + +9. 
Miscellaneous + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. Any law or regulation which provides that + the language of a contract shall be construed against the drafter shall not + be used to construe this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses If You choose to distribute Source Code Form that is + Incompatible With Secondary Licenses under the terms of this version of + the License, the notice described in Exhibit B of this License must be + attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + https://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, +then You may include the notice in a location (such as a LICENSE file in a +relevant directory) where a recipient would be likely to look for such a +notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + + This Source Code Form is "Incompatible + With Secondary Licenses", as defined by + the Mozilla Public License, v. 2.0. + diff --git a/setuptools/config/_validate_pyproject/__init__.py b/setuptools/config/_validate_pyproject/__init__.py new file mode 100644 index 00000000..dbe6cb4c --- /dev/null +++ b/setuptools/config/_validate_pyproject/__init__.py @@ -0,0 +1,34 @@ +from functools import reduce +from typing import Any, Callable, Dict + +from . import formats +from .error_reporting import detailed_errors, ValidationError +from .extra_validations import EXTRA_VALIDATIONS +from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException +from .fastjsonschema_validations import validate as _validate + +__all__ = [ + "validate", + "FORMAT_FUNCTIONS", + "EXTRA_VALIDATIONS", + "ValidationError", + "JsonSchemaException", + "JsonSchemaValueException", +] + + +FORMAT_FUNCTIONS: Dict[str, Callable[[str], bool]] = { + fn.__name__.replace("_", "-"): fn + for fn in formats.__dict__.values() + if callable(fn) and not fn.__name__.startswith("_") +} + + +def validate(data: Any) -> bool: + """Validate the given ``data`` object using JSON Schema + This function raises ``ValidationError`` if ``data`` is invalid. 
+ """ + with detailed_errors(): + _validate(data, custom_formats=FORMAT_FUNCTIONS) + reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data) + return True diff --git a/setuptools/config/_validate_pyproject/error_reporting.py b/setuptools/config/_validate_pyproject/error_reporting.py new file mode 100644 index 00000000..f78e4838 --- /dev/null +++ b/setuptools/config/_validate_pyproject/error_reporting.py @@ -0,0 +1,318 @@ +import io +import json +import logging +import os +import re +from contextlib import contextmanager +from textwrap import indent, wrap +from typing import Any, Dict, Iterator, List, Optional, Sequence, Union, cast + +from .fastjsonschema_exceptions import JsonSchemaValueException + +_logger = logging.getLogger(__name__) + +_MESSAGE_REPLACEMENTS = { + "must be named by propertyName definition": "keys must be named by", + "one of contains definition": "at least one item that matches", + " same as const definition:": "", + "only specified items": "only items matching the definition", +} + +_SKIP_DETAILS = ( + "must not be empty", + "is always invalid", + "must not be there", +) + +_NEED_DETAILS = {"anyOf", "oneOf", "anyOf", "contains", "propertyNames", "not", "items"} + +_CAMEL_CASE_SPLITTER = re.compile(r"\W+|([A-Z][^A-Z\W]*)") +_IDENTIFIER = re.compile(r"^[\w_]+$", re.I) + +_TOML_JARGON = { + "object": "table", + "property": "key", + "properties": "keys", + "property names": "keys", +} + + +class ValidationError(JsonSchemaValueException): + """Report violations of a given JSON schema. + + This class extends :exc:`~fastjsonschema.JsonSchemaValueException` + by adding the following properties: + + - ``summary``: an improved version of the ``JsonSchemaValueException`` error message + with only the necessary information) + + - ``details``: more contextual information about the error like the failing schema + itself and the value that violates the schema. + + Depending on the level of the verbosity of the ``logging`` configuration + the exception message will be only ``summary`` (default) or a combination of + ``summary`` and ``details`` (when the logging level is set to :obj:`logging.DEBUG`). 
+ """ + + summary = "" + details = "" + _original_message = "" + + @classmethod + def _from_jsonschema(cls, ex: JsonSchemaValueException): + formatter = _ErrorFormatting(ex) + obj = cls(str(formatter), ex.value, formatter.name, ex.definition, ex.rule) + debug_code = os.getenv("JSONSCHEMA_DEBUG_CODE_GENERATION", "false").lower() + if debug_code != "false": # pragma: no cover + obj.__cause__, obj.__traceback__ = ex.__cause__, ex.__traceback__ + obj._original_message = ex.message + obj.summary = formatter.summary + obj.details = formatter.details + return obj + + +@contextmanager +def detailed_errors(): + try: + yield + except JsonSchemaValueException as ex: + raise ValidationError._from_jsonschema(ex) from None + + +class _ErrorFormatting: + def __init__(self, ex: JsonSchemaValueException): + self.ex = ex + self.name = f"`{self._simplify_name(ex.name)}`" + self._original_message = self.ex.message.replace(ex.name, self.name) + self._summary = "" + self._details = "" + + def __str__(self) -> str: + if _logger.getEffectiveLevel() <= logging.DEBUG and self.details: + return f"{self.summary}\n\n{self.details}" + + return self.summary + + @property + def summary(self) -> str: + if not self._summary: + self._summary = self._expand_summary() + + return self._summary + + @property + def details(self) -> str: + if not self._details: + self._details = self._expand_details() + + return self._details + + def _simplify_name(self, name): + x = len("data.") + return name[x:] if name.startswith("data.") else name + + def _expand_summary(self): + msg = self._original_message + + for bad, repl in _MESSAGE_REPLACEMENTS.items(): + msg = msg.replace(bad, repl) + + if any(substring in msg for substring in _SKIP_DETAILS): + return msg + + schema = self.ex.rule_definition + if self.ex.rule in _NEED_DETAILS and schema: + summary = _SummaryWriter(_TOML_JARGON) + return f"{msg}:\n\n{indent(summary(schema), ' ')}" + + return msg + + def _expand_details(self) -> str: + optional = [] + desc_lines = self.ex.definition.pop("$$description", []) + desc = self.ex.definition.pop("description", None) or " ".join(desc_lines) + if desc: + description = "\n".join( + wrap( + desc, + width=80, + initial_indent=" ", + subsequent_indent=" ", + break_long_words=False, + ) + ) + optional.append(f"DESCRIPTION:\n{description}") + schema = json.dumps(self.ex.definition, indent=4) + value = json.dumps(self.ex.value, indent=4) + defaults = [ + f"GIVEN VALUE:\n{indent(value, ' ')}", + f"OFFENDING RULE: {self.ex.rule!r}", + f"DEFINITION:\n{indent(schema, ' ')}", + ] + return "\n\n".join(optional + defaults) + + +class _SummaryWriter: + _IGNORE = {"description", "default", "title", "examples"} + + def __init__(self, jargon: Optional[Dict[str, str]] = None): + self.jargon: Dict[str, str] = jargon or {} + # Clarify confusing terms + self._terms = { + "anyOf": "at least one of the following", + "oneOf": "exactly one of the following", + "allOf": "all of the following", + "not": "(*NOT* the following)", + "prefixItems": f"{self._jargon('items')} (in order)", + "items": "items", + "contains": "contains at least one of", + "propertyNames": ( + f"non-predefined acceptable {self._jargon('property names')}" + ), + "patternProperties": f"{self._jargon('properties')} named via pattern", + "const": "predefined value", + "enum": "one of", + } + # Attributes that indicate that the definition is easy and can be done + # inline (e.g. 
string and number)
+        self._guess_inline_defs = [
+            "enum",
+            "const",
+            "maxLength",
+            "minLength",
+            "pattern",
+            "format",
+            "minimum",
+            "maximum",
+            "exclusiveMinimum",
+            "exclusiveMaximum",
+            "multipleOf",
+        ]
+
+    def _jargon(self, term: Union[str, List[str]]) -> Union[str, List[str]]:
+        if isinstance(term, list):
+            return [self.jargon.get(t, t) for t in term]
+        return self.jargon.get(term, term)
+
+    def __call__(
+        self,
+        schema: Union[dict, List[dict]],
+        prefix: str = "",
+        *,
+        _path: Sequence[str] = (),
+    ) -> str:
+        if isinstance(schema, list):
+            return self._handle_list(schema, prefix, _path)
+
+        filtered = self._filter_unnecessary(schema, _path)
+        simple = self._handle_simple_dict(filtered, _path)
+        if simple:
+            return f"{prefix}{simple}"
+
+        child_prefix = self._child_prefix(prefix, "  ")
+        item_prefix = self._child_prefix(prefix, "- ")
+        indent = len(prefix) * " "
+        with io.StringIO() as buffer:
+            for i, (key, value) in enumerate(filtered.items()):
+                child_path = [*_path, key]
+                line_prefix = prefix if i == 0 else indent
+                buffer.write(f"{line_prefix}{self._label(child_path)}:")
+                # ^ just the first item should receive the complete prefix
+                if isinstance(value, dict):
+                    filtered = self._filter_unnecessary(value, child_path)
+                    simple = self._handle_simple_dict(filtered, child_path)
+                    buffer.write(
+                        f" {simple}"
+                        if simple
+                        else f"\n{self(value, child_prefix, _path=child_path)}"
+                    )
+                elif isinstance(value, list) and (
+                    key != "type" or self._is_property(child_path)
+                ):
+                    children = self._handle_list(value, item_prefix, child_path)
+                    sep = " " if children.startswith("[") else "\n"
+                    buffer.write(f"{sep}{children}")
+                else:
+                    buffer.write(f" {self._value(value, child_path)}\n")
+            return buffer.getvalue()
+
+    def _is_unnecessary(self, path: Sequence[str]) -> bool:
+        if self._is_property(path) or not path:  # empty path => instruction @ root
+            return False
+        key = path[-1]
+        return any(key.startswith(k) for k in "$_") or key in self._IGNORE
+
+    def _filter_unnecessary(self, schema: dict, path: Sequence[str]):
+        return {
+            key: value
+            for key, value in schema.items()
+            if not self._is_unnecessary([*path, key])
+        }
+
+    def _handle_simple_dict(self, value: dict, path: Sequence[str]) -> Optional[str]:
+        inline = any(p in value for p in self._guess_inline_defs)
+        simple = not any(isinstance(v, (list, dict)) for v in value.values())
+        if inline or simple:
+            return f"{{{', '.join(self._inline_attrs(value, path))}}}\n"
+        return None
+
+    def _handle_list(
+        self, schemas: list, prefix: str = "", path: Sequence[str] = ()
+    ) -> str:
+        if self._is_unnecessary(path):
+            return ""
+
+        repr_ = repr(schemas)
+        if all(not isinstance(e, (dict, list)) for e in schemas) and len(repr_) < 60:
+            return f"{repr_}\n"
+
+        item_prefix = self._child_prefix(prefix, "- ")
+        return "".join(
+            self(v, item_prefix, _path=[*path, f"[{i}]"]) for i, v in enumerate(schemas)
+        )
+
+    def _is_property(self, path: Sequence[str]):
+        """Check if the given path can correspond to an arbitrarily named property"""
+        counter = 0
+        for key in path[-2::-1]:
+            if key not in {"properties", "patternProperties"}:
+                break
+            counter += 1
+
+        # If the counter is even, the path corresponds to a JSON Schema keyword;
+        # otherwise it can be any arbitrary string naming a property
+        return counter % 2 == 1
+
+    def _label(self, path: Sequence[str]) -> str:
+        *parents, key = path
+        if not self._is_property(path):
+            norm_key = _separate_terms(key)
+            return self._terms.get(key) or " ".join(self._jargon(norm_key))
+
+        if parents[-1] == "patternProperties":
"patternProperties": + return f"(regex {key!r})" + return repr(key) # property name + + def _value(self, value: Any, path: Sequence[str]) -> str: + if path[-1] == "type" and not self._is_property(path): + type_ = self._jargon(value) + return ( + f"[{', '.join(type_)}]" if isinstance(value, list) else cast(str, type_) + ) + return repr(value) + + def _inline_attrs(self, schema: dict, path: Sequence[str]) -> Iterator[str]: + for key, value in schema.items(): + child_path = [*path, key] + yield f"{self._label(child_path)}: {self._value(value, child_path)}" + + def _child_prefix(self, parent_prefix: str, child_prefix: str) -> str: + return len(parent_prefix) * " " + child_prefix + + +def _separate_terms(word: str) -> List[str]: + """ + >>> _separate_terms("FooBar-foo") + ['foo', 'bar', 'foo'] + """ + return [w.lower() for w in _CAMEL_CASE_SPLITTER.split(word) if w] diff --git a/setuptools/config/_validate_pyproject/extra_validations.py b/setuptools/config/_validate_pyproject/extra_validations.py new file mode 100644 index 00000000..48c4e257 --- /dev/null +++ b/setuptools/config/_validate_pyproject/extra_validations.py @@ -0,0 +1,36 @@ +"""The purpose of this module is implement PEP 621 validations that are +difficult to express as a JSON Schema (or that are not supported by the current +JSON Schema library). +""" + +from typing import Mapping, TypeVar + +from .fastjsonschema_exceptions import JsonSchemaValueException + +T = TypeVar("T", bound=Mapping) + + +class RedefiningStaticFieldAsDynamic(JsonSchemaValueException): + """According to PEP 621: + + Build back-ends MUST raise an error if the metadata specifies a field + statically as well as being listed in dynamic. + """ + + +def validate_project_dynamic(pyproject: T) -> T: + project_table = pyproject.get("project", {}) + dynamic = project_table.get("dynamic", []) + + for field in dynamic: + if field in project_table: + msg = f"You cannot provide a value for `project.{field}` and " + msg += "list it under `project.dynamic` at the same time" + name = f"data.project.{field}" + value = {field: project_table[field], "...": " # ...", "dynamic": dynamic} + raise RedefiningStaticFieldAsDynamic(msg, value, name, rule="PEP 621") + + return pyproject + + +EXTRA_VALIDATIONS = (validate_project_dynamic,) diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py b/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py new file mode 100644 index 00000000..d2dddd6a --- /dev/null +++ b/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py @@ -0,0 +1,51 @@ +import re + + +SPLIT_RE = re.compile(r'[\.\[\]]+') + + +class JsonSchemaException(ValueError): + """ + Base exception of ``fastjsonschema`` library. + """ + + +class JsonSchemaValueException(JsonSchemaException): + """ + Exception raised by validation function. Available properties: + + * ``message`` containing human-readable information what is wrong (e.g. ``data.property[index] must be smaller than or equal to 42``), + * invalid ``value`` (e.g. ``60``), + * ``name`` of a path in the data structure (e.g. ``data.property[index]``), + * ``path`` as an array in the data structure (e.g. ``['data', 'property', 'index']``), + * the whole ``definition`` which the ``value`` has to fulfil (e.g. ``{'type': 'number', 'maximum': 42}``), + * ``rule`` which the ``value`` is breaking (e.g. ``maximum``) + * and ``rule_definition`` (e.g. ``42``). + + .. versionchanged:: 2.14.0 + Added all extra properties. 
+ """ + + def __init__(self, message, value=None, name=None, definition=None, rule=None): + super().__init__(message) + self.message = message + self.value = value + self.name = name + self.definition = definition + self.rule = rule + + @property + def path(self): + return [item for item in SPLIT_RE.split(self.name) if item != ''] + + @property + def rule_definition(self): + if not self.rule or not self.definition: + return None + return self.definition.get(self.rule) + + +class JsonSchemaDefinitionException(JsonSchemaException): + """ + Exception raised by generator of validation function. + """ diff --git a/setuptools/config/_validate_pyproject/fastjsonschema_validations.py b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py new file mode 100644 index 00000000..3ad1edd0 --- /dev/null +++ b/setuptools/config/_validate_pyproject/fastjsonschema_validations.py @@ -0,0 +1,1004 @@ +# noqa +# type: ignore +# flake8: noqa +# pylint: skip-file +# mypy: ignore-errors +# yapf: disable +# pylama:skip=1 + + +# *** PLEASE DO NOT MODIFY DIRECTLY: Automatically generated code *** + + +VERSION = "2.15.3" +import re +from .fastjsonschema_exceptions import JsonSchemaValueException + + +REGEX_PATTERNS = { + '^.*$': re.compile('^.*$'), + '.+': re.compile('.+'), + '^.+$': re.compile('^.+$'), + 'idn-email_re_pattern': re.compile('^[^@]+@[^@]+\\.[^@]+\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats={}, name_prefix=None): + validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats, (name_prefix or "data") + "") + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', 
'$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The 
``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "build-system" in data_keys: + data_keys.remove("build-system") + data__buildsystem = data["build-system"] + if not isinstance(data__buildsystem, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must be object", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='type') + data__buildsystem_is_dict = isinstance(data__buildsystem, dict) + if data__buildsystem_is_dict: + data__buildsystem_len = len(data__buildsystem) + if not all(prop in data__buildsystem for prop in ['requires']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must contain ['requires'] properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='required') + data__buildsystem_keys = set(data__buildsystem.keys()) + if "requires" in data__buildsystem_keys: + data__buildsystem_keys.remove("requires") + data__buildsystem__requires = data__buildsystem["requires"] + if not isinstance(data__buildsystem__requires, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.requires must be array", value=data__buildsystem__requires, name="" + (name_prefix or "data") + ".build-system.requires", definition={'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, rule='type') + data__buildsystem__requires_is_list = isinstance(data__buildsystem__requires, (list, tuple)) + if data__buildsystem__requires_is_list: + data__buildsystem__requires_len = len(data__buildsystem__requires) + for data__buildsystem__requires_x, data__buildsystem__requires_item in enumerate(data__buildsystem__requires): + if not isinstance(data__buildsystem__requires_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.requires[{data__buildsystem__requires_x}]".format(**locals()) + " must be string", value=data__buildsystem__requires_item, name="" + (name_prefix or "data") + ".build-system.requires[{data__buildsystem__requires_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "build-backend" in data__buildsystem_keys: + data__buildsystem_keys.remove("build-backend") + data__buildsystem__buildbackend = data__buildsystem["build-backend"] + if not isinstance(data__buildsystem__buildbackend, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.build-backend must be string", value=data__buildsystem__buildbackend, name="" + (name_prefix or "data") + ".build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='type') + if isinstance(data__buildsystem__buildbackend, str): + if not custom_formats["pep517-backend-reference"](data__buildsystem__buildbackend): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.build-backend must be pep517-backend-reference", value=data__buildsystem__buildbackend, name="" + (name_prefix or "data") + ".build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='format') + if "backend-path" in data__buildsystem_keys: + data__buildsystem_keys.remove("backend-path") + data__buildsystem__backendpath = data__buildsystem["backend-path"] + if not isinstance(data__buildsystem__backendpath, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.backend-path must be array", value=data__buildsystem__backendpath, name="" + (name_prefix or "data") + ".build-system.backend-path", definition={'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}, rule='type') + data__buildsystem__backendpath_is_list = isinstance(data__buildsystem__backendpath, (list, tuple)) + if data__buildsystem__backendpath_is_list: + data__buildsystem__backendpath_len = len(data__buildsystem__backendpath) + for data__buildsystem__backendpath_x, data__buildsystem__backendpath_item in enumerate(data__buildsystem__backendpath): + if not isinstance(data__buildsystem__backendpath_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals()) + " must be string", value=data__buildsystem__backendpath_item, name="" + (name_prefix or "data") + ".build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals()) + "", definition={'type': 'string', '$comment': 'Should be 
a path (TODO: enforce it with format?)'}, rule='type') + if data__buildsystem_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must not contain "+str(data__buildsystem_keys)+" properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='additionalProperties') + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data__project, custom_formats, (name_prefix or "data") + ".project") + if "tool" in data_keys: + data_keys.remove("tool") + data__tool = data["tool"] + if not isinstance(data__tool, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 
'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 
'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type') + data__tool_is_dict = isinstance(data__tool, dict) + if data__tool_is_dict: + data__tool_keys = set(data__tool.keys()) + if "distutils" in data__tool_keys: + data__tool_keys.remove("distutils") + data__tool__distutils = data__tool["distutils"] + validate_https___docs_python_org_3_install(data__tool__distutils, custom_formats, (name_prefix or "data") + ".tool.distutils") + if "setuptools" in data__tool_keys: + data__tool_keys.remove("setuptools") + data__tool__setuptools = data__tool["setuptools"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data__tool__setuptools, custom_formats, (name_prefix or "data") + ".tool.setuptools") + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. 
Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', 
'$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The 
``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this doesn't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be 
safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this doesn't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': 
['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "platforms" in data_keys: + data_keys.remove("platforms") + data__platforms = data["platforms"] + if not isinstance(data__platforms, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".platforms must be array", value=data__platforms, name="" + (name_prefix or "data") + ".platforms", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__platforms_is_list = isinstance(data__platforms, (list, tuple)) + if data__platforms_is_list: + data__platforms_len = len(data__platforms) + for data__platforms_x, data__platforms_item in enumerate(data__platforms): + if not isinstance(data__platforms_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".platforms[{data__platforms_x}]".format(**locals()) + " must be string", value=data__platforms_item, name="" + (name_prefix or "data") + ".platforms[{data__platforms_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "provides" in data_keys: + data_keys.remove("provides") + data__provides = data["provides"] + if not isinstance(data__provides, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides must be array", value=data__provides, name="" + (name_prefix or "data") + ".provides", definition={'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type') + data__provides_is_list = isinstance(data__provides, (list, tuple)) + if data__provides_is_list: + data__provides_len = len(data__provides) + 
for data__provides_x, data__provides_item in enumerate(data__provides): + if not isinstance(data__provides_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + " must be string", value=data__provides_item, name="" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type') + if isinstance(data__provides_item, str): + if not custom_formats["pep508-identifier"](data__provides_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + " must be pep508-identifier", value=data__provides_item, name="" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format') + if "obsoletes" in data_keys: + data_keys.remove("obsoletes") + data__obsoletes = data["obsoletes"] + if not isinstance(data__obsoletes, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes must be array", value=data__obsoletes, name="" + (name_prefix or "data") + ".obsoletes", definition={'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type') + data__obsoletes_is_list = isinstance(data__obsoletes, (list, tuple)) + if data__obsoletes_is_list: + data__obsoletes_len = len(data__obsoletes) + for data__obsoletes_x, data__obsoletes_item in enumerate(data__obsoletes): + if not isinstance(data__obsoletes_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + " must be string", value=data__obsoletes_item, name="" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type') + if isinstance(data__obsoletes_item, str): + if not custom_formats["pep508-identifier"](data__obsoletes_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + " must be pep508-identifier", value=data__obsoletes_item, name="" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format') + if "zip-safe" in data_keys: + data_keys.remove("zip-safe") + data__zipsafe = data["zip-safe"] + if not isinstance(data__zipsafe, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".zip-safe must be boolean", value=data__zipsafe, name="" + (name_prefix or "data") + ".zip-safe", definition={'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, rule='type') + if "script-files" in data_keys: + data_keys.remove("script-files") + data__scriptfiles = data["script-files"] + if not isinstance(data__scriptfiles, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files must be array", value=data__scriptfiles, name="" + (name_prefix or "data") + ".script-files", definition={'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, rule='type') + data__scriptfiles_is_list = isinstance(data__scriptfiles, (list, 
tuple)) + if data__scriptfiles_is_list: + data__scriptfiles_len = len(data__scriptfiles) + for data__scriptfiles_x, data__scriptfiles_item in enumerate(data__scriptfiles): + if not isinstance(data__scriptfiles_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files[{data__scriptfiles_x}]".format(**locals()) + " must be string", value=data__scriptfiles_item, name="" + (name_prefix or "data") + ".script-files[{data__scriptfiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "eager-resources" in data_keys: + data_keys.remove("eager-resources") + data__eagerresources = data["eager-resources"] + if not isinstance(data__eagerresources, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources must be array", value=data__eagerresources, name="" + (name_prefix or "data") + ".eager-resources", definition={'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__eagerresources_is_list = isinstance(data__eagerresources, (list, tuple)) + if data__eagerresources_is_list: + data__eagerresources_len = len(data__eagerresources) + for data__eagerresources_x, data__eagerresources_item in enumerate(data__eagerresources): + if not isinstance(data__eagerresources_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources[{data__eagerresources_x}]".format(**locals()) + " must be string", value=data__eagerresources_item, name="" + (name_prefix or "data") + ".eager-resources[{data__eagerresources_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "packages" in data_keys: + data_keys.remove("packages") + data__packages = data["packages"] + data__packages_one_of_count1 = 0 + if data__packages_one_of_count1 < 2: + try: + if not isinstance(data__packages, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be array", value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, rule='type') + data__packages_is_list = isinstance(data__packages, (list, tuple)) + if data__packages_is_list: + data__packages_len = len(data__packages) + for data__packages_x, data__packages_item in enumerate(data__packages): + if not isinstance(data__packages_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + " must be string", value=data__packages_item, name="" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type') + if isinstance(data__packages_item, str): + if not custom_formats["python-module-name"](data__packages_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + " must be python-module-name", value=data__packages_item, name="" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format') + data__packages_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data__packages_one_of_count1 < 2: + try: + 
validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data__packages, custom_formats, (name_prefix or "data") + ".packages") + data__packages_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data__packages_one_of_count1 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be valid exactly by one definition" + (" (" + str(data__packages_one_of_count1) + " matches found)"), value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can contain shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can contain shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, rule='oneOf') + if "package-dir" in data_keys: + data_keys.remove("package-dir") + data__packagedir = data["package-dir"] + if not isinstance(data__packagedir, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be object", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='type') + data__packagedir_is_dict = isinstance(data__packagedir, dict) + if data__packagedir_is_dict: + data__packagedir_keys = set(data__packagedir.keys()) + for data__packagedir_key, data__packagedir_val in data__packagedir.items(): + if REGEX_PATTERNS['^.*$'].search(data__packagedir_key): + if data__packagedir_key in data__packagedir_keys: + data__packagedir_keys.remove(data__packagedir_key) + if not isinstance(data__packagedir_val, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + " must be string", value=data__packagedir_val, name="" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if data__packagedir_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must not contain "+str(data__packagedir_keys)+" properties", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='additionalProperties') + data__packagedir_len = len(data__packagedir) + if data__packagedir_len != 0: + data__packagedir_property_names = True + for data__packagedir_key in data__packagedir: + try: + data__packagedir_key_one_of_count2 = 0 + if data__packagedir_key_one_of_count2 < 2: + try: + if isinstance(data__packagedir_key, str): + if not custom_formats["python-module-name"](data__packagedir_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be python-module-name", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'format': 'python-module-name'}, rule='format') + data__packagedir_key_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data__packagedir_key_one_of_count2 < 2: + try: + if data__packagedir_key != "": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be same as const definition: ", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'const': ''}, rule='const') + 
data__packagedir_key_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data__packagedir_key_one_of_count2 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be valid exactly by one definition" + (" (" + str(data__packagedir_key_one_of_count2) + " matches found)"), value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, rule='oneOf') + except JsonSchemaValueException: + data__packagedir_property_names = False + if not data__packagedir_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be named by propertyName definition", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='propertyNames') + if "package-data" in data_keys: + data_keys.remove("package-data") + data__packagedata = data["package-data"] + if not isinstance(data__packagedata, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be object", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type') + data__packagedata_is_dict = isinstance(data__packagedata, dict) + if data__packagedata_is_dict: + data__packagedata_keys = set(data__packagedata.keys()) + for data__packagedata_key, data__packagedata_val in data__packagedata.items(): + if REGEX_PATTERNS['^.*$'].search(data__packagedata_key): + if data__packagedata_key in data__packagedata_keys: + data__packagedata_keys.remove(data__packagedata_key) + if not isinstance(data__packagedata_val, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}".format(**locals()) + " must be array", value=data__packagedata_val, name="" + (name_prefix or "data") + ".package-data.{data__packagedata_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__packagedata_val_is_list = isinstance(data__packagedata_val, (list, tuple)) + if data__packagedata_val_is_list: + data__packagedata_val_len = len(data__packagedata_val) + for data__packagedata_val_x, data__packagedata_val_item in enumerate(data__packagedata_val): + if not isinstance(data__packagedata_val_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + " must be string", value=data__packagedata_val_item, name="" + (name_prefix or "data") + 
".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if data__packagedata_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must not contain "+str(data__packagedata_keys)+" properties", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties') + data__packagedata_len = len(data__packagedata) + if data__packagedata_len != 0: + data__packagedata_property_names = True + for data__packagedata_key in data__packagedata: + try: + data__packagedata_key_one_of_count3 = 0 + if data__packagedata_key_one_of_count3 < 2: + try: + if isinstance(data__packagedata_key, str): + if not custom_formats["python-module-name"](data__packagedata_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be python-module-name", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'format': 'python-module-name'}, rule='format') + data__packagedata_key_one_of_count3 += 1 + except JsonSchemaValueException: pass + if data__packagedata_key_one_of_count3 < 2: + try: + if data__packagedata_key != "*": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be same as const definition: *", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'const': '*'}, rule='const') + data__packagedata_key_one_of_count3 += 1 + except JsonSchemaValueException: pass + if data__packagedata_key_one_of_count3 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be valid exactly by one definition" + (" (" + str(data__packagedata_key_one_of_count3) + " matches found)"), value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf') + except JsonSchemaValueException: + data__packagedata_property_names = False + if not data__packagedata_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be named by propertyName definition", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames') + if "include-package-data" in data_keys: + data_keys.remove("include-package-data") + data__includepackagedata = data["include-package-data"] + if not isinstance(data__includepackagedata, (bool)): 
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".include-package-data must be boolean", value=data__includepackagedata, name="" + (name_prefix or "data") + ".include-package-data", definition={'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, rule='type') + if "exclude-package-data" in data_keys: + data_keys.remove("exclude-package-data") + data__excludepackagedata = data["exclude-package-data"] + if not isinstance(data__excludepackagedata, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be object", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type') + data__excludepackagedata_is_dict = isinstance(data__excludepackagedata, dict) + if data__excludepackagedata_is_dict: + data__excludepackagedata_keys = set(data__excludepackagedata.keys()) + for data__excludepackagedata_key, data__excludepackagedata_val in data__excludepackagedata.items(): + if REGEX_PATTERNS['^.*$'].search(data__excludepackagedata_key): + if data__excludepackagedata_key in data__excludepackagedata_keys: + data__excludepackagedata_keys.remove(data__excludepackagedata_key) + if not isinstance(data__excludepackagedata_val, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}".format(**locals()) + " must be array", value=data__excludepackagedata_val, name="" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__excludepackagedata_val_is_list = isinstance(data__excludepackagedata_val, (list, tuple)) + if data__excludepackagedata_val_is_list: + data__excludepackagedata_val_len = len(data__excludepackagedata_val) + for data__excludepackagedata_val_x, data__excludepackagedata_val_item in enumerate(data__excludepackagedata_val): + if not isinstance(data__excludepackagedata_val_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + " must be string", value=data__excludepackagedata_val_item, name="" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if data__excludepackagedata_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must not contain "+str(data__excludepackagedata_keys)+" properties", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be 
excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties') + data__excludepackagedata_len = len(data__excludepackagedata) + if data__excludepackagedata_len != 0: + data__excludepackagedata_property_names = True + for data__excludepackagedata_key in data__excludepackagedata: + try: + data__excludepackagedata_key_one_of_count4 = 0 + if data__excludepackagedata_key_one_of_count4 < 2: + try: + if isinstance(data__excludepackagedata_key, str): + if not custom_formats["python-module-name"](data__excludepackagedata_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be python-module-name", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'format': 'python-module-name'}, rule='format') + data__excludepackagedata_key_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data__excludepackagedata_key_one_of_count4 < 2: + try: + if data__excludepackagedata_key != "*": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be same as const definition: *", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'const': '*'}, rule='const') + data__excludepackagedata_key_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data__excludepackagedata_key_one_of_count4 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be valid exactly by one definition" + (" (" + str(data__excludepackagedata_key_one_of_count4) + " matches found)"), value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf') + except JsonSchemaValueException: + data__excludepackagedata_property_names = False + if not data__excludepackagedata_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be named by propertyName definition", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames') + if "namespace-packages" in data_keys: + data_keys.remove("namespace-packages") + data__namespacepackages = data["namespace-packages"] + if not isinstance(data__namespacepackages, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages must be array", value=data__namespacepackages, name="" + (name_prefix or "data") + ".namespace-packages", definition={'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, rule='type') 
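#     A minimal usage sketch for the checks above: ``package-data`` and
#     ``exclude-package-data`` map package names (or "*") to lists of glob
#     strings. Assuming the module-level ``validate`` entry point and the
#     ``JsonSchemaValueException`` import that fastjsonschema normally emits
#     (neither is shown in this hunk), driving the validator looks roughly
#     like::
#
#         config = {
#             "project": {"name": "mypkg", "version": "0.1"},
#             "tool": {"setuptools": {"package-data": {"mypkg": "*.txt"}}},  # not a list
#         }
#         try:
#             validate(config)
#         except JsonSchemaValueException as exc:
#             print(exc)  # e.g. "data.tool.setuptools.package-data.mypkg must be array"
#
#     Wrapping "*.txt" in a list would make the same call pass.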
+            data__namespacepackages_is_list = isinstance(data__namespacepackages, (list, tuple))
+            if data__namespacepackages_is_list:
+                data__namespacepackages_len = len(data__namespacepackages)
+                for data__namespacepackages_x, data__namespacepackages_item in enumerate(data__namespacepackages):
+                    if not isinstance(data__namespacepackages_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be string", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
+                    if isinstance(data__namespacepackages_item, str):
+                        if not custom_formats["python-module-name"](data__namespacepackages_item):
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be python-module-name", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+        if "py-modules" in data_keys:
+            data_keys.remove("py-modules")
+            data__pymodules = data["py-modules"]
+            if not isinstance(data__pymodules, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules must be array", value=data__pymodules, name="" + (name_prefix or "data") + ".py-modules", definition={'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, rule='type')
+            data__pymodules_is_list = isinstance(data__pymodules, (list, tuple))
+            if data__pymodules_is_list:
+                data__pymodules_len = len(data__pymodules)
+                for data__pymodules_x, data__pymodules_item in enumerate(data__pymodules):
+                    if not isinstance(data__pymodules_item, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be string", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
+                    if isinstance(data__pymodules_item, str):
+                        if not custom_formats["python-module-name"](data__pymodules_item):
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be python-module-name", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+        if "data-files" in data_keys:
+            data_keys.remove("data-files")
+            data__datafiles = data["data-files"]
+            if not isinstance(data__datafiles, (dict)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files must be object", value=data__datafiles, name="" + (name_prefix or "data") + ".data-files", definition={'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this doesn't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
+            data__datafiles_is_dict = isinstance(data__datafiles, dict)
+            if data__datafiles_is_dict:
+                data__datafiles_keys = set(data__datafiles.keys())
+                for data__datafiles_key, data__datafiles_val in data__datafiles.items():
+                    if REGEX_PATTERNS['^.*$'].search(data__datafiles_key):
+                        if data__datafiles_key in data__datafiles_keys:
+                            data__datafiles_keys.remove(data__datafiles_key)
+                        if not isinstance(data__datafiles_val, (list, tuple)):
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files.{data__datafiles_key}".format(**locals()) + " must be array", value=data__datafiles_val, name="" + (name_prefix or "data") + ".data-files.{data__datafiles_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+                        data__datafiles_val_is_list = isinstance(data__datafiles_val, (list, tuple))
+                        if data__datafiles_val_is_list:
+                            data__datafiles_val_len = len(data__datafiles_val)
+                            for data__datafiles_val_x, data__datafiles_val_item in enumerate(data__datafiles_val):
+                                if not isinstance(data__datafiles_val_item, (str)):
+                                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals()) + " must be string", value=data__datafiles_val_item, name="" + (name_prefix or "data") + ".data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+        if "cmdclass" in data_keys:
+            data_keys.remove("cmdclass")
+            data__cmdclass = data["cmdclass"]
+            if not isinstance(data__cmdclass, (dict)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass must be object", value=data__cmdclass, name="" + (name_prefix or "data") + ".cmdclass", definition={'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, rule='type')
+            data__cmdclass_is_dict = isinstance(data__cmdclass, dict)
+            if data__cmdclass_is_dict:
+                data__cmdclass_keys = set(data__cmdclass.keys())
+                for data__cmdclass_key, data__cmdclass_val in data__cmdclass.items():
+                    if REGEX_PATTERNS['^.*$'].search(data__cmdclass_key):
+                        if data__cmdclass_key in data__cmdclass_keys:
+                            data__cmdclass_keys.remove(data__cmdclass_key)
+                        if not isinstance(data__cmdclass_val, (str)):
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + " must be string", value=data__cmdclass_val, name="" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='type')
+                        if isinstance(data__cmdclass_val, str):
+                            if not custom_formats["python-qualified-identifier"](data__cmdclass_val):
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + " must be python-qualified-identifier", value=data__cmdclass_val, 
name="" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='format') + if "license-files" in data_keys: + data_keys.remove("license-files") + data__licensefiles = data["license-files"] + if not isinstance(data__licensefiles, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files must be array", value=data__licensefiles, name="" + (name_prefix or "data") + ".license-files", definition={'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, rule='type') + data__licensefiles_is_list = isinstance(data__licensefiles, (list, tuple)) + if data__licensefiles_is_list: + data__licensefiles_len = len(data__licensefiles) + for data__licensefiles_x, data__licensefiles_item in enumerate(data__licensefiles): + if not isinstance(data__licensefiles_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + " must be string", value=data__licensefiles_item, name="" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + else: data["license-files"] = ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'] + if "dynamic" in data_keys: + data_keys.remove("dynamic") + data__dynamic = data["dynamic"] + if not isinstance(data__dynamic, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be object", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='type') + data__dynamic_is_dict = isinstance(data__dynamic, dict) + if data__dynamic_is_dict: + data__dynamic_keys = set(data__dynamic.keys()) + if "version" in data__dynamic_keys: + data__dynamic_keys.remove("version") + data__dynamic__version = data__dynamic["version"] + data__dynamic__version_one_of_count5 = 0 + if data__dynamic__version_one_of_count5 < 2: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version") + data__dynamic__version_one_of_count5 += 1 + except JsonSchemaValueException: pass + if data__dynamic__version_one_of_count5 < 2: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version") + data__dynamic__version_one_of_count5 += 1 + except JsonSchemaValueException: pass + if data__dynamic__version_one_of_count5 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.version must be valid exactly by one definition" + (" (" + str(data__dynamic__version_one_of_count5) + " matches found)"), value=data__dynamic__version, name="" + (name_prefix or "data") + ".dynamic.version", definition={'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. 
Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, rule='oneOf') + if "classifiers" in data__dynamic_keys: + data__dynamic_keys.remove("classifiers") + data__dynamic__classifiers = data__dynamic["classifiers"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__classifiers, custom_formats, (name_prefix or "data") + ".dynamic.classifiers") + if "description" in data__dynamic_keys: + data__dynamic_keys.remove("description") + data__dynamic__description = data__dynamic["description"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__description, custom_formats, (name_prefix or "data") + ".dynamic.description") + if "entry-points" in data__dynamic_keys: + data__dynamic_keys.remove("entry-points") + data__dynamic__entrypoints = data__dynamic["entry-points"] + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__entrypoints, custom_formats, (name_prefix or "data") + ".dynamic.entry-points") + if "readme" in data__dynamic_keys: + data__dynamic_keys.remove("readme") + data__dynamic__readme = data__dynamic["readme"] + data__dynamic__readme_any_of_count6 = 0 + if not data__dynamic__readme_any_of_count6: + try: + validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__readme, custom_formats, (name_prefix or "data") + ".dynamic.readme") + data__dynamic__readme_any_of_count6 += 1 + except JsonSchemaValueException: pass + if not data__dynamic__readme_any_of_count6: + try: + data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict) + if data__dynamic__readme_is_dict: + data__dynamic__readme_keys = set(data__dynamic__readme.keys()) + if "content-type" in data__dynamic__readme_keys: + data__dynamic__readme_keys.remove("content-type") + data__dynamic__readme__contenttype = data__dynamic__readme["content-type"] + if not isinstance(data__dynamic__readme__contenttype, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme.content-type must be string", value=data__dynamic__readme__contenttype, name="" + (name_prefix or "data") + ".dynamic.readme.content-type", definition={'type': 'string'}, rule='type') + data__dynamic__readme_any_of_count6 += 1 + except JsonSchemaValueException: pass + if not data__dynamic__readme_any_of_count6: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme cannot be validated by any definition", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': 
False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='anyOf') + data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict) + if data__dynamic__readme_is_dict: + data__dynamic__readme_len = len(data__dynamic__readme) + if not all(prop in data__dynamic__readme for prop in ['file']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must contain ['file'] properties", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='required') + if data__dynamic_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must not contain "+str(data__dynamic_keys)+" properties", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='additionalProperties') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': 
{'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': 
['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['file']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['file'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='required') + data_keys = set(data.keys()) + if "file" in data_keys: + data_keys.remove("file") + data__file = data["file"] + data__file_one_of_count7 = 0 + if data__file_one_of_count7 < 2: + try: + if not isinstance(data__file, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be string", value=data__file, name="" + (name_prefix or "data") + ".file", 
definition={'type': 'string'}, rule='type') + data__file_one_of_count7 += 1 + except JsonSchemaValueException: pass + if data__file_one_of_count7 < 2: + try: + if not isinstance(data__file, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be array", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__file_is_list = isinstance(data__file, (list, tuple)) + if data__file_is_list: + data__file_len = len(data__file) + for data__file_x, data__file_item in enumerate(data__file): + if not isinstance(data__file_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + " must be string", value=data__file_item, name="" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + data__file_one_of_count7 += 1 + except JsonSchemaValueException: pass + if data__file_one_of_count7 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be valid exactly by one definition" + (" (" + str(data__file_one_of_count7) + " matches found)"), value=data__file, name="" + (name_prefix or "data") + ".file", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['attr']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['attr'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='required') + data_keys = set(data.keys()) + if "attr" in data_keys: + data_keys.remove("attr") + data__attr = data["attr"] + if not isinstance(data__attr, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".attr must be string", value=data__attr, name="" + (name_prefix or "data") + ".attr", definition={'type': 'string'}, rule='type') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='additionalProperties') + return data + +def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "find" in data_keys: + data_keys.remove("find") + data__find = data["find"] + if not isinstance(data__find, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find must be object", value=data__find, name="" + (name_prefix or "data") + ".find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='type') + data__find_is_dict = isinstance(data__find, dict) + if data__find_is_dict: + data__find_keys = set(data__find.keys()) + if "where" in data__find_keys: + data__find_keys.remove("where") + data__find__where = data__find["where"] + if not isinstance(data__find__where, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.where must be array", value=data__find__where, name="" + (name_prefix or "data") + ".find.where", definition={'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__find__where_is_list = isinstance(data__find__where, (list, tuple)) + if data__find__where_is_list: + data__find__where_len = len(data__find__where) + for data__find__where_x, data__find__where_item in enumerate(data__find__where): + if not isinstance(data__find__where_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.where[{data__find__where_x}]".format(**locals()) + " must be string", value=data__find__where_item, name="" + (name_prefix or "data") + ".find.where[{data__find__where_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "exclude" in data__find_keys: + data__find_keys.remove("exclude") + data__find__exclude = data__find["exclude"] + if not isinstance(data__find__exclude, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.exclude must be array", value=data__find__exclude, name="" + (name_prefix or "data") + ".find.exclude", definition={'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type') + data__find__exclude_is_list = isinstance(data__find__exclude, (list, tuple)) + if data__find__exclude_is_list: + data__find__exclude_len = len(data__find__exclude) + for data__find__exclude_x, data__find__exclude_item in enumerate(data__find__exclude): + if not isinstance(data__find__exclude_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.exclude[{data__find__exclude_x}]".format(**locals()) + " must be string", value=data__find__exclude_item, name="" + (name_prefix or "data") + ".find.exclude[{data__find__exclude_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "include" in data__find_keys: + data__find_keys.remove("include") + data__find__include = data__find["include"] + if not isinstance(data__find__include, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.include must be array", value=data__find__include, name="" + (name_prefix or "data") + ".find.include", definition={'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type') + data__find__include_is_list = isinstance(data__find__include, (list, tuple)) + if data__find__include_is_list: + data__find__include_len = len(data__find__include) + for data__find__include_x, data__find__include_item in enumerate(data__find__include): + if not isinstance(data__find__include_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.include[{data__find__include_x}]".format(**locals()) + " must be string", value=data__find__include_item, name="" + (name_prefix or "data") + ".find.include[{data__find__include_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "namespaces" in data__find_keys: + data__find_keys.remove("namespaces") + data__find__namespaces = data__find["namespaces"] + if not isinstance(data__find__namespaces, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.namespaces must be boolean", value=data__find__namespaces, name="" + (name_prefix or "data") + ".find.namespaces", definition={'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}, rule='type') + if data__find_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".find must not contain "+str(data__find_keys)+" properties", value=data__find, name="" + (name_prefix or "data") + ".find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='additionalProperties') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='additionalProperties') + return data + +def validate_https___docs_python_org_3_install(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "global" in data_keys: + data_keys.remove("global") + data__global = data["global"] + if not isinstance(data__global, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".global must be object", value=data__global, name="" + (name_prefix or "data") + ".global", definition={'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}, rule='type') + for data_key, data_val in data.items(): + if REGEX_PATTERNS['.+'].search(data_key): + if data_key in data_keys: + data_keys.remove(data_key) + if not isinstance(data_val, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be object", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'object'}, rule='type') + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. 
MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. 
If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='required')
+        data_keys = set(data.keys())
+        if "name" in data_keys:
+            data_keys.remove("name")
+            data__name = data["name"]
+            if not isinstance(data__name, (str)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, rule='type')
+            if isinstance(data__name, str):
+                if not custom_formats["pep508-identifier"](data__name):
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be pep508-identifier", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, rule='format')
+        if "version" in data_keys:
+            data_keys.remove("version")
+            data__version = data["version"]
+            if not isinstance(data__version, (str)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be string", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='type')
+            if isinstance(data__version, str):
+                if not custom_formats["pep440"](data__version):
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be pep440", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='format')
+        if "description" in data_keys:
+            data_keys.remove("description")
+            data__description = data["description"]
+            if not isinstance(data__description, (str)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".description must be string", value=data__description, name="" + (name_prefix or "data") + ".description", definition={'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, rule='type')
+        if "readme" in data_keys:
+            data_keys.remove("readme")
+            data__readme = data["readme"]
+            data__readme_one_of_count8 = 0
+            if data__readme_one_of_count8 < 2:
+                try:
+                    if not isinstance(data__readme, (str)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be string", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, rule='type')
+                    data__readme_one_of_count8 += 1
+                except JsonSchemaValueException: pass
+            if data__readme_one_of_count8 < 2:
+                try:
+                    if not isinstance(data__readme, (dict)):
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be object", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}, rule='type')
+                    data__readme_any_of_count9 = 0
+                    if not data__readme_any_of_count9:
+                        try:
+                            data__readme_is_dict = isinstance(data__readme, dict)
+                            if data__readme_is_dict:
+                                data__readme_len = len(data__readme)
+                                if not all(prop in data__readme for prop in ['file']):
+                                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['file'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, rule='required')
+                                data__readme_keys = set(data__readme.keys())
+                                if "file" in data__readme_keys:
+                                    data__readme_keys.remove("file")
+                                    data__readme__file = data__readme["file"]
+                                    if not isinstance(data__readme__file, (str)):
+                                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.file must be string", value=data__readme__file, name="" + (name_prefix or "data") + ".readme.file", definition={'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}, rule='type')
+                            data__readme_any_of_count9 += 1
+                        except JsonSchemaValueException: pass
+                    if not data__readme_any_of_count9:
+                        try:
+                            data__readme_is_dict = isinstance(data__readme, dict)
+                            if data__readme_is_dict:
+                                data__readme_len = len(data__readme)
+                                if not all(prop in data__readme for prop in ['text']):
+                                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['text'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}, rule='required')
+                                data__readme_keys = set(data__readme.keys())
+                                if "text" in data__readme_keys:
+                                    data__readme_keys.remove("text")
+                                    data__readme__text = data__readme["text"]
+                                    if not isinstance(data__readme__text, (str)):
+                                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.text must be string", value=data__readme__text, name="" + (name_prefix or "data") + ".readme.text", definition={'type': 'string', 'description': 'Full text describing the project.'}, rule='type')
+                            data__readme_any_of_count9 += 1
+                        except JsonSchemaValueException: pass
+                    if not data__readme_any_of_count9:
+                        raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme cannot be validated by any definition", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, rule='anyOf')
+                    data__readme_is_dict = isinstance(data__readme, dict)
+                    if data__readme_is_dict:
+                        data__readme_len = len(data__readme)
+                        if not all(prop in data__readme for prop in ['content-type']):
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['content-type'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}, rule='required')
+                        data__readme_keys = set(data__readme.keys())
+                        if "content-type" in data__readme_keys:
+                            data__readme_keys.remove("content-type")
+                            data__readme__contenttype = data__readme["content-type"]
+                            if not isinstance(data__readme__contenttype, (str)):
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.content-type must be string", value=data__readme__contenttype, name="" + (name_prefix or "data") + ".readme.content-type", definition={'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}, rule='type')
+                    data__readme_one_of_count8 += 1
+                except JsonSchemaValueException: pass
+            if data__readme_one_of_count8 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count8) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf')
+        if "requires-python" in data_keys:
+            data_keys.remove("requires-python")
+            data__requirespython = data["requires-python"]
+            if not isinstance(data__requirespython, (str)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".requires-python must be string", value=data__requirespython, name="" + (name_prefix or "data") + ".requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, rule='type')
+            if isinstance(data__requirespython, str):
+                if not custom_formats["pep508-versionspec"](data__requirespython):
+                    raise JsonSchemaValueException("" + (name_prefix or "data") + ".requires-python must be pep508-versionspec", value=data__requirespython, name="" + (name_prefix or "data") + ".requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, rule='format')
+        if "license" in data_keys:
+            data_keys.remove("license")
+            data__license = data["license"]
+            data__license_one_of_count10 = 0
+            if data__license_one_of_count10 < 2:
+                try:
+                    data__license_is_dict = isinstance(data__license, dict)
+                    if data__license_is_dict:
+                        data__license_len = len(data__license)
+                        if not all(prop in data__license for prop in ['file']):
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['file'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, rule='required')
+                        data__license_keys = set(data__license.keys())
+                        if "file" in data__license_keys:
+                            data__license_keys.remove("file")
+                            data__license__file = data__license["file"]
+                            if not isinstance(data__license__file, (str)):
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.file must be string", value=data__license__file, name="" + (name_prefix or "data") + ".license.file", definition={'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}, rule='type')
+                    data__license_one_of_count10 += 1
+                except JsonSchemaValueException: pass
+            if data__license_one_of_count10 < 2:
+                try:
+                    data__license_is_dict = isinstance(data__license, dict)
+                    if data__license_is_dict:
+                        data__license_len = len(data__license)
+                        if not all(prop in data__license for prop in ['text']):
+                            raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['text'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}, rule='required')
+                        data__license_keys = set(data__license.keys())
+                        if "text" in data__license_keys:
+                            data__license_keys.remove("text")
+                            data__license__text = data__license["text"]
+                            if not isinstance(data__license__text, (str)):
+                                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.text must be string", value=data__license__text, name="" + (name_prefix or "data") + ".license.text", definition={'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}, rule='type')
+                    data__license_one_of_count10 += 1
+                except JsonSchemaValueException: pass
+            if data__license_one_of_count10 != 1:
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count10) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, rule='oneOf')
+        if "authors" in data_keys:
+            data_keys.remove("authors")
+            data__authors = data["authors"]
+            if not isinstance(data__authors, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".authors must be array", value=data__authors, name="" + (name_prefix or "data") + ".authors", definition={'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, rule='type')
+            data__authors_is_list = isinstance(data__authors, (list, tuple))
+            if data__authors_is_list:
+                data__authors_len = len(data__authors)
+                for data__authors_x, data__authors_item in enumerate(data__authors):
+                    validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__authors_item, custom_formats, (name_prefix or "data") + ".authors[{data__authors_x}]")
+        if "maintainers" in data_keys:
+            data_keys.remove("maintainers")
+            data__maintainers = data["maintainers"]
+            if not isinstance(data__maintainers, (list, tuple)):
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".maintainers must be array", value=data__maintainers, name="" + (name_prefix or "data") + ".maintainers", definition={'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, rule='type') + data__maintainers_is_list = isinstance(data__maintainers, (list, tuple)) + if data__maintainers_is_list: + data__maintainers_len = len(data__maintainers) + for data__maintainers_x, data__maintainers_item in enumerate(data__maintainers): + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__maintainers_item, custom_formats, (name_prefix or "data") + ".maintainers[{data__maintainers_x}]") + if "keywords" in data_keys: + data_keys.remove("keywords") + data__keywords = data["keywords"] + if not isinstance(data__keywords, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".keywords must be array", value=data__keywords, name="" + (name_prefix or "data") + ".keywords", definition={'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, rule='type') + data__keywords_is_list = isinstance(data__keywords, (list, tuple)) + if data__keywords_is_list: + data__keywords_len = len(data__keywords) + for data__keywords_x, data__keywords_item in enumerate(data__keywords): + if not isinstance(data__keywords_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".keywords[{data__keywords_x}]".format(**locals()) + " must be string", value=data__keywords_item, name="" + (name_prefix or "data") + ".keywords[{data__keywords_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "classifiers" in data_keys: + data_keys.remove("classifiers") + data__classifiers = data["classifiers"] + if not isinstance(data__classifiers, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers must be array", value=data__classifiers, name="" + (name_prefix or "data") + ".classifiers", definition={'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, rule='type') + data__classifiers_is_list = isinstance(data__classifiers, (list, tuple)) + if data__classifiers_is_list: + data__classifiers_len = len(data__classifiers) + for data__classifiers_x, data__classifiers_item in enumerate(data__classifiers): + if not isinstance(data__classifiers_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + " must be string", value=data__classifiers_item, name="" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, rule='type') + if isinstance(data__classifiers_item, str): + if not custom_formats["trove-classifier"](data__classifiers_item): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + " must be trove-classifier", value=data__classifiers_item, name="" + (name_prefix or "data") + 
".classifiers[{data__classifiers_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, rule='format') + if "urls" in data_keys: + data_keys.remove("urls") + data__urls = data["urls"] + if not isinstance(data__urls, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls must be object", value=data__urls, name="" + (name_prefix or "data") + ".urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='type') + data__urls_is_dict = isinstance(data__urls, dict) + if data__urls_is_dict: + data__urls_keys = set(data__urls.keys()) + for data__urls_key, data__urls_val in data__urls.items(): + if REGEX_PATTERNS['^.+$'].search(data__urls_key): + if data__urls_key in data__urls_keys: + data__urls_keys.remove(data__urls_key) + if not isinstance(data__urls_val, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + " must be string", value=data__urls_val, name="" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'url'}, rule='type') + if isinstance(data__urls_val, str): + if not custom_formats["url"](data__urls_val): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + " must be url", value=data__urls_val, name="" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'url'}, rule='format') + if data__urls_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls must not contain "+str(data__urls_keys)+" properties", value=data__urls, name="" + (name_prefix or "data") + ".urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='additionalProperties') + if "scripts" in data_keys: + data_keys.remove("scripts") + data__scripts = data["scripts"] + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__scripts, custom_formats, (name_prefix or "data") + ".scripts") + if "gui-scripts" in data_keys: + data_keys.remove("gui-scripts") + data__guiscripts = data["gui-scripts"] + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__guiscripts, custom_formats, (name_prefix or "data") + ".gui-scripts") + if "entry-points" in data_keys: + data_keys.remove("entry-points") + data__entrypoints = data["entry-points"] + data__entrypoints_is_dict = isinstance(data__entrypoints, dict) + if data__entrypoints_is_dict: + data__entrypoints_keys = set(data__entrypoints.keys()) + for data__entrypoints_key, data__entrypoints_val in data__entrypoints.items(): + if REGEX_PATTERNS['^.+$'].search(data__entrypoints_key): + if data__entrypoints_key in data__entrypoints_keys: + data__entrypoints_keys.remove(data__entrypoints_key) + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__entrypoints_val, custom_formats, (name_prefix or "data") + ".entry-points.{data__entrypoints_key}") + if 
data__entrypoints_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must not contain "+str(data__entrypoints_keys)+" properties", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='additionalProperties') + data__entrypoints_len = len(data__entrypoints) + if data__entrypoints_len != 0: + data__entrypoints_property_names = True + for data__entrypoints_key in data__entrypoints: + try: + if isinstance(data__entrypoints_key, str): + if not custom_formats["python-entrypoint-group"](data__entrypoints_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must be python-entrypoint-group", value=data__entrypoints_key, name="" + (name_prefix or "data") + ".entry-points", definition={'format': 'python-entrypoint-group'}, rule='format') + except JsonSchemaValueException: + data__entrypoints_property_names = False + if not data__entrypoints_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must be named by propertyName definition", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='propertyNames') + if "dependencies" in data_keys: + data_keys.remove("dependencies") + data__dependencies = data["dependencies"] + if not isinstance(data__dependencies, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependencies must be array", value=data__dependencies, name="" + (name_prefix or "data") + ".dependencies", definition={'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, rule='type') + data__dependencies_is_list = isinstance(data__dependencies, (list, tuple)) + if data__dependencies_is_list: + data__dependencies_len = len(data__dependencies) + for data__dependencies_x, data__dependencies_item in enumerate(data__dependencies): + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__dependencies_item, custom_formats, (name_prefix or "data") + ".dependencies[{data__dependencies_x}]") + if "optional-dependencies" in data_keys: + data_keys.remove("optional-dependencies") + data__optionaldependencies = data["optional-dependencies"] + if not isinstance(data__optionaldependencies, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be object", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type') + data__optionaldependencies_is_dict = isinstance(data__optionaldependencies, dict) + if data__optionaldependencies_is_dict: + data__optionaldependencies_keys = set(data__optionaldependencies.keys()) + for data__optionaldependencies_key, data__optionaldependencies_val in data__optionaldependencies.items(): + if REGEX_PATTERNS['^.+$'].search(data__optionaldependencies_key): + if data__optionaldependencies_key in data__optionaldependencies_keys: + data__optionaldependencies_keys.remove(data__optionaldependencies_key) + if not isinstance(data__optionaldependencies_val, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}".format(**locals()) + " must be array", value=data__optionaldependencies_val, name="" + (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, rule='type') + data__optionaldependencies_val_is_list = isinstance(data__optionaldependencies_val, (list, tuple)) + if data__optionaldependencies_val_is_list: + data__optionaldependencies_val_len = len(data__optionaldependencies_val) + for data__optionaldependencies_val_x, 
data__optionaldependencies_val_item in enumerate(data__optionaldependencies_val): + validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__optionaldependencies_val_item, custom_formats, (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}[{data__optionaldependencies_val_x}]") + if data__optionaldependencies_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must not contain "+str(data__optionaldependencies_keys)+" properties", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties') + data__optionaldependencies_len = len(data__optionaldependencies) + if data__optionaldependencies_len != 0: + data__optionaldependencies_property_names = True + for data__optionaldependencies_key in data__optionaldependencies: + try: + if isinstance(data__optionaldependencies_key, str): + if not custom_formats["pep508-identifier"](data__optionaldependencies_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be pep508-identifier", value=data__optionaldependencies_key, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'format': 'pep508-identifier'}, rule='format') + except JsonSchemaValueException: + data__optionaldependencies_property_names = False + if not data__optionaldependencies_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be named by propertyName definition", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='propertyNames') + if "dynamic" in data_keys: + data_keys.remove("dynamic") + data__dynamic = data["dynamic"] + if not isinstance(data__dynamic, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be array", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}, rule='type') + data__dynamic_is_list = isinstance(data__dynamic, (list, tuple)) + if data__dynamic_is_list: + data__dynamic_len = len(data__dynamic) + for data__dynamic_x, data__dynamic_item in enumerate(data__dynamic): + if data__dynamic_item not in ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 
'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + " must be one of ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']", value=data__dynamic_item, name="" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + "", definition={'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}, rule='enum') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='additionalProperties') + try: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['dynamic']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['dynamic'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, rule='required') + data_keys = set(data.keys()) + if "dynamic" in data_keys: + data_keys.remove("dynamic") + data__dynamic = data["dynamic"] + data__dynamic_is_list = isinstance(data__dynamic, (list, tuple)) + if data__dynamic_is_list: + data__dynamic_contains = False + for data__dynamic_key in data__dynamic: + try: + if data__dynamic_key != "version": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be same as const definition: version", value=data__dynamic_key, name="" + (name_prefix or "data") + ".dynamic", definition={'const': 'version'}, rule='const') + data__dynamic_contains = True + break + except JsonSchemaValueException: pass + if not data__dynamic_contains: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must contain one of contains definition", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}, rule='contains') + except JsonSchemaValueException: pass + else: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must NOT match a disallowed definition", value=data, name="" + (name_prefix or "data") + "", definition={'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, rule='not') + except JsonSchemaValueException: + pass + else: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['version']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['version'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, rule='required') + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='type') + if isinstance(data, str): + if not custom_formats["pep508"](data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep508", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='format') + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key, data_val in data.items(): + if REGEX_PATTERNS['^.+$'].search(data_key): + if data_key in data_keys: + data_keys.remove(data_key) + if not isinstance(data_val, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='type') + if isinstance(data_val, str): + if not custom_formats["python-entrypoint-reference"](data_val): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be python-entrypoint-reference", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='format') + if data_keys: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='additionalProperties') + data_len = len(data) + if data_len != 0: + data_property_names = True + for data_key in data: + try: + if isinstance(data_key, str): + if not custom_formats["python-entrypoint-name"](data_key): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-entrypoint-name", value=data_key, name="" + (name_prefix or "data") + "", definition={'format': 'python-entrypoint-name'}, rule='format') + except JsonSchemaValueException: + data_property_names = False + if not data_property_names: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be named by propertyName definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='propertyNames') + return data + +def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, rule='type') + if "email" in data_keys: + data_keys.remove("email") + data__email = data["email"] + if not isinstance(data__email, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be string", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='type') + if isinstance(data__email, str): + if not REGEX_PATTERNS["idn-email_re_pattern"].match(data__email): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be idn-email", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='format') + return data
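The generated validators above follow the ``fastjsonschema`` convention: each one raises ``JsonSchemaValueException`` on the first violation and returns the input data unchanged on success. A minimal sketch of calling one of them directly (the module path and the short alias are assumptions for illustration; regular callers go through the package-level ``validate`` helper, which wires in ``FORMAT_FUNCTIONS`` automatically):

    # Illustrative only: the long function name is copied from the generated
    # code above, but the exact module it lives in is an assumption.
    from setuptools.config._validate_pyproject.fastjsonschema_validations import (
        validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author
        as validate_author,
    )

    validate_author({"name": "Jane Doe", "email": "jane@example.com"})  # returns the dict
    validate_author({"email": "not-an-email"})  # raises JsonSchemaValueException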
\ No newline at end of file diff --git a/setuptools/config/_validate_pyproject/formats.py b/setuptools/config/_validate_pyproject/formats.py new file mode 100644 index 00000000..f41fce38 --- /dev/null +++ b/setuptools/config/_validate_pyproject/formats.py @@ -0,0 +1,257 @@ +import logging +import os +import re +import string +import typing +from itertools import chain as _chain + +_logger = logging.getLogger(__name__) + +# ------------------------------------------------------------------------------------- +# PEP 440 + +VERSION_PATTERN = r""" + v? + (?: + (?:(?P<epoch>[0-9]+)!)? # epoch + (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment + (?P<pre> # pre-release + [-_\.]? + (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview)) + [-_\.]? + (?P<pre_n>[0-9]+)? + )? + (?P<post> # post release + (?:-(?P<post_n1>[0-9]+)) + | + (?: + [-_\.]? + (?P<post_l>post|rev|r) + [-_\.]? + (?P<post_n2>[0-9]+)? + ) + )? + (?P<dev> # dev release + [-_\.]? + (?P<dev_l>dev) + [-_\.]? + (?P<dev_n>[0-9]+)? + )? + ) + (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version +""" + +VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I) + + +def pep440(version: str) -> bool: + return VERSION_REGEX.match(version) is not None + + +# ------------------------------------------------------------------------------------- +# PEP 508 + +PEP508_IDENTIFIER_PATTERN = r"([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])" +PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I) + + +def pep508_identifier(name: str) -> bool: + return PEP508_IDENTIFIER_REGEX.match(name) is not None + + +try: + try: + from packaging import requirements as _req + except ImportError: # pragma: no cover + # let's try setuptools vendored version + from setuptools._vendor.packaging import requirements as _req # type: ignore + + def pep508(value: str) -> bool: + try: + _req.Requirement(value) + return True + except _req.InvalidRequirement: + return False + +except ImportError: # pragma: no cover + _logger.warning( + "Could not find an installation of `packaging`. Requirements, dependencies and " + "versions might not be validated. " + "To enforce validation, please install `packaging`." 
+ ) + + def pep508(value: str) -> bool: + return True + + +def pep508_versionspec(value: str) -> bool: + """Expression that can be used to specify/lock versions (including ranges)""" + if any(c in value for c in (";", "]", "@")): + # In PEP 508: + # conditional markers, extras and URL specs are not included in the + # versionspec + return False + # Let's pretend we have a dependency called `requirement` with the given + # version spec, then we can re-use the pep508 function for validation: + return pep508(f"requirement{value}") + + +# ------------------------------------------------------------------------------------- +# PEP 517 + + +def pep517_backend_reference(value: str) -> bool: + module, _, obj = value.partition(":") + identifiers = (i.strip() for i in _chain(module.split("."), obj.split("."))) + return all(python_identifier(i) for i in identifiers if i) + + +# ------------------------------------------------------------------------------------- +# Classifiers - PEP 301 + + +def _download_classifiers() -> str: + import cgi + from urllib.request import urlopen + + url = "https://pypi.org/pypi?:action=list_classifiers" + with urlopen(url) as response: + content_type = response.getheader("content-type", "text/plain") + encoding = cgi.parse_header(content_type)[1].get("charset", "utf-8") + return response.read().decode(encoding) + + +class _TroveClassifier: + """The ``trove_classifiers`` package is the official way of validating classifiers, + however this package might not be always available. + As a workaround we can still download a list from PyPI. + We also don't want to be over strict about it, so simply skipping silently is an + option (classifiers will be validated anyway during the upload to PyPI). + """ + + def __init__(self): + self.downloaded: typing.Union[None, False, typing.Set[str]] = None + self._skip_download = False + # None => not cached yet + # False => cache not available + self.__name__ = "trove_classifier" # Emulate a public function + + def _disable_download(self): + # This is a private API. Only setuptools has the consent of using it. + self._skip_download = True + + def __call__(self, value: str) -> bool: + if self.downloaded is False or self._skip_download is True: + return True + + if os.getenv("NO_NETWORK") or os.getenv("VALIDATE_PYPROJECT_NO_NETWORK"): + self.downloaded = False + msg = ( + "Install ``trove-classifiers`` to ensure proper validation. " + "Skipping download of classifiers list from PyPI (NO_NETWORK)." + ) + _logger.debug(msg) + return True + + if self.downloaded is None: + msg = ( + "Install ``trove-classifiers`` to ensure proper validation. " + "Meanwhile a list of classifiers will be downloaded from PyPI." 
+            )
+            _logger.debug(msg)
+            try:
+                self.downloaded = set(_download_classifiers().splitlines())
+            except Exception:
+                self.downloaded = False
+                _logger.debug("Problem with download, skipping validation")
+                return True
+
+        return value in self.downloaded or value.lower().startswith("private ::")
+
+
+try:
+    from trove_classifiers import classifiers as _trove_classifiers
+
+    def trove_classifier(value: str) -> bool:
+        return value in _trove_classifiers or value.lower().startswith("private ::")
+
+except ImportError:  # pragma: no cover
+    trove_classifier = _TroveClassifier()
+
+
+# -------------------------------------------------------------------------------------
+# Non-PEP related
+
+
+def url(value: str) -> bool:
+    from urllib.parse import urlparse
+
+    try:
+        parts = urlparse(value)
+        if not parts.scheme:
+            _logger.warning(
+                "For maximum compatibility please make sure to include a "
+                "`scheme` prefix in your URL (e.g. 'http://'). "
+                f"Given value: {value}"
+            )
+            if not (value.startswith("/") or value.startswith("\\") or "@" in value):
+                parts = urlparse(f"http://{value}")
+
+        return bool(parts.scheme and parts.netloc)
+    except Exception:
+        return False
+
+
+# https://packaging.python.org/specifications/entry-points/
+ENTRYPOINT_PATTERN = r"[^\[\s=]([^=]*[^\s=])?"
+ENTRYPOINT_REGEX = re.compile(f"^{ENTRYPOINT_PATTERN}$", re.I)
+RECOMMENDED_ENTRYPOINT_PATTERN = r"[\w.-]+"
+RECOMMENDED_ENTRYPOINT_REGEX = re.compile(f"^{RECOMMENDED_ENTRYPOINT_PATTERN}$", re.I)
+ENTRYPOINT_GROUP_PATTERN = r"\w+(\.\w+)*"
+ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I)
+
+
+def python_identifier(value: str) -> bool:
+    return value.isidentifier()
+
+
+def python_qualified_identifier(value: str) -> bool:
+    if value.startswith(".") or value.endswith("."):
+        return False
+    return all(python_identifier(m) for m in value.split("."))
+
+
+def python_module_name(value: str) -> bool:
+    return python_qualified_identifier(value)
+
+
+def python_entrypoint_group(value: str) -> bool:
+    return ENTRYPOINT_GROUP_REGEX.match(value) is not None
+
+
+def python_entrypoint_name(value: str) -> bool:
+    if not ENTRYPOINT_REGEX.match(value):
+        return False
+    if not RECOMMENDED_ENTRYPOINT_REGEX.match(value):
+        msg = f"Entry point `{value}` does not follow recommended pattern: "
+        msg += RECOMMENDED_ENTRYPOINT_PATTERN
+        _logger.warning(msg)
+    return True
+
+
+def python_entrypoint_reference(value: str) -> bool:
+    module, _, rest = value.partition(":")
+    if "[" in rest:
+        obj, _, extras_ = rest.partition("[")
+        if extras_.strip()[-1] != "]":
+            return False
+        extras = (x.strip() for x in extras_.strip(string.whitespace + "[]").split(","))
+        if not all(pep508_identifier(e) for e in extras):
+            return False
+        _logger.warning(f"`{value}` - using extras for entry points is not recommended")
+    else:
+        obj = rest
+
+    module_parts = module.split(".")
+    identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
+    return all(python_identifier(i.strip()) for i in identifiers)
diff --git a/setuptools/config/expand.py b/setuptools/config/expand.py
new file mode 100644
index 00000000..ff9b2c9b
--- /dev/null
+++ b/setuptools/config/expand.py
@@ -0,0 +1,458 @@
+"""Utility functions to expand configuration directives or special values
+(such as glob patterns).
+
+We can split the process of interpreting configuration files into 2 steps:
+
+1. The parsing of the file contents from strings into value objects
+   that can be understood by Python (for example a string with a comma
+   separated list of keywords into an actual Python list of strings).
+
+2. The expansion (or post-processing) of these values according to the
+   semantics ``setuptools`` assigns to them (for example a configuration field
+   with the ``file:`` directive should be expanded from a list of file paths to
+   a single string with the contents of those files concatenated).
+
+This module focuses on the second step, and therefore allows sharing the
+expansion functions among several configuration file formats.
+"""
+import ast
+import importlib
+import io
+import os
+import sys
+import warnings
+from glob import iglob
+from configparser import ConfigParser
+from importlib.machinery import ModuleSpec
+from itertools import chain
+from typing import (
+    TYPE_CHECKING,
+    Callable,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    TypeVar,
+    Union,
+    cast
+)
+from types import ModuleType
+
+from distutils.errors import DistutilsOptionError
+
+if TYPE_CHECKING:
+    from setuptools.dist import Distribution  # noqa
+    from setuptools.discovery import ConfigDiscovery  # noqa
+    from distutils.dist import DistributionMetadata  # noqa
+
+chain_iter = chain.from_iterable
+_Path = Union[str, os.PathLike]
+_K = TypeVar("_K")
+_V = TypeVar("_V", covariant=True)
+
+
+class StaticModule:
+    """Proxy to a module object that avoids executing arbitrary code."""
+
+    def __init__(self, name: str, spec: ModuleSpec):
+        with open(spec.origin) as strm:  # type: ignore
+            src = strm.read()
+        module = ast.parse(src)
+        vars(self).update(locals())
+        del self.self
+
+    def __getattr__(self, attr):
+        """Attempt to load an attribute "statically", via :func:`ast.literal_eval`."""
+        try:
+            assignment_expressions = (
+                statement
+                for statement in self.module.body
+                if isinstance(statement, ast.Assign)
+            )
+            expressions_with_target = (
+                (statement, target)
+                for statement in assignment_expressions
+                for target in statement.targets
+            )
+            matching_values = (
+                statement.value
+                for statement, target in expressions_with_target
+                if isinstance(target, ast.Name) and target.id == attr
+            )
+            return next(ast.literal_eval(value) for value in matching_values)
+        except Exception as e:
+            raise AttributeError(f"{self.name} has no attribute {attr}") from e
+
+
+def glob_relative(
+    patterns: Iterable[str], root_dir: Optional[_Path] = None
+) -> List[str]:
+    """Expand the list of glob patterns, but preserving relative paths.
+
+    :param list[str] patterns: List of glob patterns
+    :param str root_dir: Path to which globs should be relative
+                         (current directory by default)
+    :rtype: list
+    """
+    glob_characters = {'*', '?', '[', ']', '{', '}'}
+    expanded_values = []
+    root_dir = root_dir or os.getcwd()
+    for value in patterns:
+
+        # Has globby characters?
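+        # (i.e. does ``value`` contain at least one character with a special
+        # meaning in glob patterns? plain paths skip the expansion below)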
+        if any(char in value for char in glob_characters):
+            # then expand the glob pattern while keeping paths *relative*:
+            glob_path = os.path.abspath(os.path.join(root_dir, value))
+            expanded_values.extend(sorted(
+                os.path.relpath(path, root_dir).replace(os.sep, "/")
+                for path in iglob(glob_path, recursive=True)))
+
+        else:
+            # take the value as-is
+            path = os.path.relpath(value, root_dir).replace(os.sep, "/")
+            expanded_values.append(path)
+
+    return expanded_values
+
+
+def read_files(filepaths: Union[str, bytes, Iterable[_Path]], root_dir=None) -> str:
+    """Return the content of the files, concatenated using ``\n``, as a single str.
+
+    This function is sandboxed and won't reach anything outside ``root_dir``
+    (by default ``root_dir`` is the current directory).
+    """
+    from setuptools.extern.more_itertools import always_iterable
+
+    root_dir = os.path.abspath(root_dir or os.getcwd())
+    _filepaths = (os.path.join(root_dir, path) for path in always_iterable(filepaths))
+    return '\n'.join(
+        _read_file(path)
+        for path in _filter_existing_files(_filepaths)
+        if _assert_local(path, root_dir)
+    )
+
+
+def _filter_existing_files(filepaths: Iterable[_Path]) -> Iterator[_Path]:
+    for path in filepaths:
+        if os.path.isfile(path):
+            yield path
+        else:
+            warnings.warn(f"File {path!r} cannot be found")
+
+
+def _read_file(filepath: Union[bytes, _Path]) -> str:
+    with io.open(filepath, encoding='utf-8') as f:
+        return f.read()
+
+
+def _assert_local(filepath: _Path, root_dir: str):
+    if not os.path.abspath(filepath).startswith(root_dir):
+        msg = f"Cannot access {filepath!r} (or anything outside {root_dir!r})"
+        raise DistutilsOptionError(msg)
+
+    return True
+
+
+def read_attr(
+    attr_desc: str,
+    package_dir: Optional[Mapping[str, str]] = None,
+    root_dir: Optional[_Path] = None
+):
+    """Reads the value of an attribute from a module.
+
+    This function will try to read the attribute statically first
+    (via :func:`ast.literal_eval`), and only evaluate the module if it fails.
+
+    Examples:
+        read_attr("package.attr")
+        read_attr("package.module.attr")
+
+    :param str attr_desc: Dot-separated string describing how to reach the
+        attribute (see examples above)
+    :param dict[str, str] package_dir: Mapping of package names to their
+        location on disk (represented by paths relative to ``root_dir``).
+    :param str root_dir: Path to directory containing all the packages in
+        ``package_dir`` (current directory by default).
+    :rtype: str
+    """
+    root_dir = root_dir or os.getcwd()
+    attrs_path = attr_desc.strip().split('.')
+    attr_name = attrs_path.pop()
+    module_name = '.'.join(attrs_path)
+    module_name = module_name or '__init__'
+    _parent_path, path, module_name = _find_module(module_name, package_dir, root_dir)
+    spec = _find_spec(module_name, path)
+
+    try:
+        return getattr(StaticModule(module_name, spec), attr_name)
+    except Exception:
+        # fallback to evaluate module
+        module = _load_spec(spec, module_name)
+        return getattr(module, attr_name)
+
+
+def _find_spec(module_name: str, module_path: Optional[_Path]) -> ModuleSpec:
+    spec = importlib.util.spec_from_file_location(module_name, module_path)
+    spec = spec or importlib.util.find_spec(module_name)
+
+    if spec is None:
+        raise ModuleNotFoundError(module_name)
+
+    return spec
+
+
+def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
+    name = getattr(spec, "__name__", module_name)
+    if name in sys.modules:
+        return sys.modules[name]
+    module = importlib.util.module_from_spec(spec)
+    sys.modules[name] = module  # cache (it also ensures `==` works on loaded items)
+    spec.loader.exec_module(module)  # type: ignore
+    return module
+
+
+def _find_module(
+    module_name: str, package_dir: Optional[Mapping[str, str]], root_dir: _Path
+) -> Tuple[_Path, Optional[str], str]:
+    """Given a module (that could normally be imported by ``module_name``
+    after the build is complete), find the path to the parent directory where
+    it is contained and the canonical name that could be used to import it,
+    considering the ``package_dir`` in the build configuration and ``root_dir``.
+    """
+    parent_path = root_dir
+    module_parts = module_name.split('.')
+    if package_dir:
+        if module_parts[0] in package_dir:
+            # A custom path was specified for the module we want to import
+            custom_path = package_dir[module_parts[0]]
+            parts = custom_path.rsplit('/', 1)
+            if len(parts) > 1:
+                parent_path = os.path.join(root_dir, parts[0])
+                parent_module = parts[1]
+            else:
+                parent_module = custom_path
+            module_name = ".".join([parent_module, *module_parts[1:]])
+        elif '' in package_dir:
+            # A custom parent directory was specified for all root modules
+            parent_path = os.path.join(root_dir, package_dir[''])
+
+    path_start = os.path.join(parent_path, *module_name.split("."))
+    candidates = chain(
+        (f"{path_start}.py", os.path.join(path_start, "__init__.py")),
+        iglob(f"{path_start}.*")
+    )
+    module_path = next((x for x in candidates if os.path.isfile(x)), None)
+    return parent_path, module_path, module_name
+
+
+def resolve_class(
+    qualified_class_name: str,
+    package_dir: Optional[Mapping[str, str]] = None,
+    root_dir: Optional[_Path] = None
+) -> Callable:
+    """Given a qualified class name, return the associated class object"""
+    root_dir = root_dir or os.getcwd()
+    idx = qualified_class_name.rfind('.')
+    class_name = qualified_class_name[idx + 1 :]
+    pkg_name = qualified_class_name[:idx]
+
+    _parent_path, path, module_name = _find_module(pkg_name, package_dir, root_dir)
+    module = _load_spec(_find_spec(module_name, path), module_name)
+    return getattr(module, class_name)
+
+
+def cmdclass(
+    values: Dict[str, str],
+    package_dir: Optional[Mapping[str, str]] = None,
+    root_dir: Optional[_Path] = None
+) -> Dict[str, Callable]:
+    """Given a dictionary mapping command names to strings for qualified class
+    names, apply :func:`resolve_class` to the dict values.
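+
+    For example (hypothetical, purely illustrative names),
+    ``{"sdist": "pkg.cmds.CustomSdist"}`` would be resolved to a dict mapping
+    ``"sdist"`` to the actual ``pkg.cmds.CustomSdist`` class object.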
+ """ + return {k: resolve_class(v, package_dir, root_dir) for k, v in values.items()} + + +def find_packages( + *, + namespaces=True, + fill_package_dir: Optional[Dict[str, str]] = None, + root_dir: Optional[_Path] = None, + **kwargs +) -> List[str]: + """Works similarly to :func:`setuptools.find_packages`, but with all + arguments given as keyword arguments. Moreover, ``where`` can be given + as a list (the results will be simply concatenated). + + When the additional keyword argument ``namespaces`` is ``True``, it will + behave like :func:`setuptools.find_namespace_packages`` (i.e. include + implicit namespaces as per :pep:`420`). + + The ``where`` argument will be considered relative to ``root_dir`` (or the current + working directory when ``root_dir`` is not given). + + If the ``fill_package_dir`` argument is passed, this function will consider it as a + similar data structure to the ``package_dir`` configuration parameter add fill-in + any missing package location. + + :rtype: list + """ + from setuptools.discovery import construct_package_dir + from setuptools.extern.more_itertools import unique_everseen, always_iterable + + if namespaces: + from setuptools.discovery import PEP420PackageFinder as PackageFinder + else: + from setuptools.discovery import PackageFinder # type: ignore + + root_dir = root_dir or os.curdir + where = kwargs.pop('where', ['.']) + packages: List[str] = [] + fill_package_dir = {} if fill_package_dir is None else fill_package_dir + + for path in unique_everseen(always_iterable(where)): + package_path = _nest_path(root_dir, path) + pkgs = PackageFinder.find(package_path, **kwargs) + packages.extend(pkgs) + if pkgs and not ( + fill_package_dir.get("") == path + or os.path.samefile(package_path, root_dir) + ): + fill_package_dir.update(construct_package_dir(pkgs, path)) + + return packages + + +def _nest_path(parent: _Path, path: _Path) -> str: + path = parent if path == "." else os.path.join(parent, path) + return os.path.normpath(path) + + +def version(value: Union[Callable, Iterable[Union[str, int]], str]) -> str: + """When getting the version directly from an attribute, + it should be normalised to string. + """ + if callable(value): + value = value() + + value = cast(Iterable[Union[str, int]], value) + + if not isinstance(value, str): + if hasattr(value, '__iter__'): + value = '.'.join(map(str, value)) + else: + value = '%s' % value + + return value + + +def canonic_package_data(package_data: dict) -> dict: + if "*" in package_data: + package_data[""] = package_data.pop("*") + return package_data + + +def canonic_data_files( + data_files: Union[list, dict], root_dir: Optional[_Path] = None +) -> List[Tuple[str, List[str]]]: + """For compatibility with ``setup.py``, ``data_files`` should be a list + of pairs instead of a dict. + + This function also expands glob patterns. + """ + if isinstance(data_files, list): + return data_files + + return [ + (dest, glob_relative(patterns, root_dir)) + for dest, patterns in data_files.items() + ] + + +def entry_points(text: str, text_source="entry-points") -> Dict[str, dict]: + """Given the contents of entry-points file, + process it into a 2-level dictionary (``dict[str, dict[str, str]]``). + The first level keys are entry-point groups, the second level keys are + entry-point names, and the second level values are references to objects + (that correspond to the entry-point value). 
+ """ + parser = ConfigParser(default_section=None, delimiters=("=",)) # type: ignore + parser.optionxform = str # case sensitive + parser.read_string(text, text_source) + groups = {k: dict(v.items()) for k, v in parser.items()} + groups.pop(parser.default_section, None) + return groups + + +class EnsurePackagesDiscovered: + """Some expand functions require all the packages to already be discovered before + they run, e.g. :func:`read_attr`, :func:`resolve_class`, :func:`cmdclass`. + + Therefore in some cases we will need to run autodiscovery during the evaluation of + the configuration. However, it is better to postpone calling package discovery as + much as possible, because some parameters can influence it (e.g. ``package_dir``), + and those might not have been processed yet. + """ + + def __init__(self, distribution: "Distribution"): + self._dist = distribution + self._called = False + + def __call__(self): + """Trigger the automatic package discovery, if it is still necessary.""" + if not self._called: + self._called = True + self._dist.set_defaults(name=False) # Skip name, we can still be parsing + + def __enter__(self): + return self + + def __exit__(self, _exc_type, _exc_value, _traceback): + if self._called: + self._dist.set_defaults.analyse_name() # Now we can set a default name + + def _get_package_dir(self) -> Mapping[str, str]: + self() + pkg_dir = self._dist.package_dir + return {} if pkg_dir is None else pkg_dir + + @property + def package_dir(self) -> Mapping[str, str]: + """Proxy to ``package_dir`` that may trigger auto-discovery when used.""" + return LazyMappingProxy(self._get_package_dir) + + +class LazyMappingProxy(Mapping[_K, _V]): + """Mapping proxy that delays resolving the target object, until really needed. + + >>> def obtain_mapping(): + ... print("Running expensive function!") + ... return {"key": "value", "other key": "other value"} + >>> mapping = LazyMappingProxy(obtain_mapping) + >>> mapping["key"] + Running expensive function! + 'value' + >>> mapping["other key"] + 'other value' + """ + + def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V]]): + self._obtain = obtain_mapping_value + self._value: Optional[Mapping[_K, _V]] = None + + def _target(self) -> Mapping[_K, _V]: + if self._value is None: + self._value = self._obtain() + return self._value + + def __getitem__(self, key: _K) -> _V: + return self._target()[key] + + def __len__(self) -> int: + return len(self._target()) + + def __iter__(self) -> Iterator[_K]: + return iter(self._target()) diff --git a/setuptools/config/pyprojecttoml.py b/setuptools/config/pyprojecttoml.py new file mode 100644 index 00000000..d4024956 --- /dev/null +++ b/setuptools/config/pyprojecttoml.py @@ -0,0 +1,435 @@ +"""Load setuptools configuration from ``pyproject.toml`` files""" +import logging +import os +import warnings +from contextlib import contextmanager +from functools import partial +from typing import TYPE_CHECKING, Callable, Dict, Optional, Mapping, Union + +from setuptools.errors import FileError, OptionError + +from . 
import expand as _expand +from ._apply_pyprojecttoml import apply as _apply +from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _WouldIgnoreField + +if TYPE_CHECKING: + from setuptools.dist import Distribution # noqa + +_Path = Union[str, os.PathLike] +_logger = logging.getLogger(__name__) + + +def load_file(filepath: _Path) -> dict: + from setuptools.extern import tomli # type: ignore + + with open(filepath, "rb") as file: + return tomli.load(file) + + +def validate(config: dict, filepath: _Path) -> bool: + from . import _validate_pyproject as validator + + trove_classifier = validator.FORMAT_FUNCTIONS.get("trove-classifier") + if hasattr(trove_classifier, "_disable_download"): + # Improve reproducibility by default. See issue 31 for validate-pyproject. + trove_classifier._disable_download() # type: ignore + + try: + return validator.validate(config) + except validator.ValidationError as ex: + _logger.error(f"configuration error: {ex.summary}") # type: ignore + _logger.debug(ex.details) # type: ignore + error = ValueError(f"invalid pyproject.toml config: {ex.name}") # type: ignore + raise error from None + + +def apply_configuration( + dist: "Distribution", + filepath: _Path, + ignore_option_errors=False, +) -> "Distribution": + """Apply the configuration from a ``pyproject.toml`` file into an existing + distribution object. + """ + config = read_configuration(filepath, True, ignore_option_errors, dist) + return _apply(dist, config, filepath) + + +def read_configuration( + filepath: _Path, + expand=True, + ignore_option_errors=False, + dist: Optional["Distribution"] = None, +): + """Read given configuration file and returns options from it as a dict. + + :param str|unicode filepath: Path to configuration file in the ``pyproject.toml`` + format. + + :param bool expand: Whether to expand directives and other computed values + (i.e. post-process the given configuration) + + :param bool ignore_option_errors: Whether to silently ignore + options, values of which could not be resolved (e.g. due to exceptions + in directives such as file:, attr:, etc.). + If False exceptions are propagated as expected. + + :param Distribution|None: Distribution object to which the configuration refers. + If not given a dummy object will be created and discarded after the + configuration is read. This is used for auto-discovery of packages in the case + a dynamic configuration (e.g. ``attr`` or ``cmdclass``) is expanded. + When ``expand=False`` this object is simply ignored. + + :rtype: dict + """ + filepath = os.path.abspath(filepath) + + if not os.path.isfile(filepath): + raise FileError(f"Configuration file {filepath!r} does not exist.") + + asdict = load_file(filepath) or {} + project_table = asdict.get("project", {}) + tool_table = asdict.get("tool", {}) + setuptools_table = tool_table.get("setuptools", {}) + if not asdict or not (project_table or setuptools_table): + return {} # User is not using pyproject to configure setuptools + + # TODO: Remove the following once the feature stabilizes: + msg = ( + "Support for project metadata in `pyproject.toml` is still experimental " + "and may be removed (or change) in future releases." + ) + warnings.warn(msg, _ExperimentalProjectMetadata) + + # There is an overall sense in the community that making include_package_data=True + # the default would be an improvement. + # `ini2toml` backfills include_package_data=False when nothing is explicitly given, + # therefore setting a default here is backwards compatible. 
+    orig_setuptools_table = setuptools_table.copy()
+    if dist and getattr(dist, "include_package_data") is not None:
+        setuptools_table.setdefault("include-package-data", dist.include_package_data)
+    else:
+        setuptools_table.setdefault("include-package-data", True)
+    # Persist changes:
+    asdict["tool"] = tool_table
+    tool_table["setuptools"] = setuptools_table
+
+    try:
+        # Don't complain about unrelated errors (e.g. tools not using the "tool" table)
+        subset = {"project": project_table, "tool": {"setuptools": setuptools_table}}
+        validate(subset, filepath)
+    except Exception as ex:
+        # TODO: Remove the following once the feature stabilizes:
+        if _skip_bad_config(project_table, orig_setuptools_table, dist):
+            return {}
+        # TODO: After the previous statement is removed the try/except can be replaced
+        # by the _ignore_errors context manager.
+        if ignore_option_errors:
+            _logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}")
+        else:
+            raise  # re-raise exception
+
+    if expand:
+        root_dir = os.path.dirname(filepath)
+        return expand_configuration(asdict, root_dir, ignore_option_errors, dist)
+
+    return asdict
+
+
+def _skip_bad_config(
+    project_cfg: dict, setuptools_cfg: dict, dist: Optional["Distribution"]
+) -> bool:
+    """Be temporarily forgiving with invalid ``pyproject.toml``"""
+    # See pypa/setuptools#3199 and pypa/cibuildwheel#1064
+
+    if dist is None or (
+        dist.metadata.name is None
+        and dist.metadata.version is None
+        and dist.install_requires is None
+    ):
+        # It seems that the build is not getting any configuration from other places
+        return False
+
+    if setuptools_cfg:
+        # If `[tool.setuptools]` is set, then `pyproject.toml` config is intentional
+        return False
+
+    given_config = set(project_cfg.keys())
+    popular_subset = {"name", "version", "python_requires", "requires-python"}
+    if given_config <= popular_subset:
+        # It seems that the docs in cibuildwheel have been inadvertently encouraging
+        # users to create `pyproject.toml` files that are not compliant with the
+        # standards. Let's be forgiving for the time being.
+        warnings.warn(_InvalidFile.message(), _InvalidFile, stacklevel=2)
+        return True
+
+    return False
+
+
+def expand_configuration(
+    config: dict,
+    root_dir: Optional[_Path] = None,
+    ignore_option_errors: bool = False,
+    dist: Optional["Distribution"] = None,
+) -> dict:
+    """Given a configuration with unresolved fields (e.g. dynamic, cmdclass, ...)
+    find their final values.
+
+    :param dict config: Dict containing the configuration for the distribution
+    :param str root_dir: Top-level directory for the distribution/project
+        (the same directory where ``pyproject.toml`` is placed)
+    :param bool ignore_option_errors: see :func:`read_configuration`
+    :param Distribution|None: Distribution object to which the configuration refers.
+        If not given, a dummy object will be created and discarded after the
+        configuration is read. Used when a dynamic configuration
+        (e.g. ``attr`` or ``cmdclass``) needs to be expanded.
+ + :rtype: dict + """ + return _ConfigExpander(config, root_dir, ignore_option_errors, dist).expand() + + +class _ConfigExpander: + def __init__( + self, + config: dict, + root_dir: Optional[_Path] = None, + ignore_option_errors: bool = False, + dist: Optional["Distribution"] = None, + ): + self.config = config + self.root_dir = root_dir or os.getcwd() + self.project_cfg = config.get("project", {}) + self.dynamic = self.project_cfg.get("dynamic", []) + self.setuptools_cfg = config.get("tool", {}).get("setuptools", {}) + self.dynamic_cfg = self.setuptools_cfg.get("dynamic", {}) + self.ignore_option_errors = ignore_option_errors + self._dist = dist + + def _ensure_dist(self) -> "Distribution": + from setuptools.dist import Distribution + + attrs = {"src_root": self.root_dir, "name": self.project_cfg.get("name", None)} + return self._dist or Distribution(attrs) + + def _process_field(self, container: dict, field: str, fn: Callable): + if field in container: + with _ignore_errors(self.ignore_option_errors): + container[field] = fn(container[field]) + + def _canonic_package_data(self, field="package-data"): + package_data = self.setuptools_cfg.get(field, {}) + return _expand.canonic_package_data(package_data) + + def expand(self): + self._expand_packages() + self._canonic_package_data() + self._canonic_package_data("exclude-package-data") + + # A distribution object is required for discovering the correct package_dir + dist = self._ensure_dist() + + with _EnsurePackagesDiscovered(dist, self.setuptools_cfg) as ensure_discovered: + package_dir = ensure_discovered.package_dir + self._expand_data_files() + self._expand_cmdclass(package_dir) + self._expand_all_dynamic(dist, package_dir) + + return self.config + + def _expand_packages(self): + packages = self.setuptools_cfg.get("packages") + if packages is None or isinstance(packages, (list, tuple)): + return + + find = packages.get("find") + if isinstance(find, dict): + find["root_dir"] = self.root_dir + find["fill_package_dir"] = self.setuptools_cfg.setdefault("package-dir", {}) + with _ignore_errors(self.ignore_option_errors): + self.setuptools_cfg["packages"] = _expand.find_packages(**find) + + def _expand_data_files(self): + data_files = partial(_expand.canonic_data_files, root_dir=self.root_dir) + self._process_field(self.setuptools_cfg, "data-files", data_files) + + def _expand_cmdclass(self, package_dir: Mapping[str, str]): + root_dir = self.root_dir + cmdclass = partial(_expand.cmdclass, package_dir=package_dir, root_dir=root_dir) + self._process_field(self.setuptools_cfg, "cmdclass", cmdclass) + + def _expand_all_dynamic(self, dist: "Distribution", package_dir: Mapping[str, str]): + special = ( # need special handling + "version", + "readme", + "entry-points", + "scripts", + "gui-scripts", + "classifiers", + ) + # `_obtain` functions are assumed to raise appropriate exceptions/warnings. + obtained_dynamic = { + field: self._obtain(dist, field, package_dir) + for field in self.dynamic + if field not in special + } + obtained_dynamic.update( + self._obtain_entry_points(dist, package_dir) or {}, + version=self._obtain_version(dist, package_dir), + readme=self._obtain_readme(dist), + classifiers=self._obtain_classifiers(dist), + ) + # `None` indicates there is nothing in `tool.setuptools.dynamic` but the value + # might have already been set by setup.py/extensions, so avoid overwriting. 
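+        # (e.g. a plugin such as setuptools-scm may have already computed
+        # ``version``; an unobtainable value must not overwrite it)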
+ updates = {k: v for k, v in obtained_dynamic.items() if v is not None} + self.project_cfg.update(updates) + + def _ensure_previously_set(self, dist: "Distribution", field: str): + previous = _PREVIOUSLY_DEFINED[field](dist) + if previous is None and not self.ignore_option_errors: + msg = ( + f"No configuration found for dynamic {field!r}.\n" + "Some dynamic fields need to be specified via `tool.setuptools.dynamic`" + "\nothers must be specified via the equivalent attribute in `setup.py`." + ) + raise OptionError(msg) + + def _obtain(self, dist: "Distribution", field: str, package_dir: Mapping[str, str]): + if field in self.dynamic_cfg: + directive = self.dynamic_cfg[field] + with _ignore_errors(self.ignore_option_errors): + root_dir = self.root_dir + if "file" in directive: + return _expand.read_files(directive["file"], root_dir) + if "attr" in directive: + return _expand.read_attr(directive["attr"], package_dir, root_dir) + msg = f"invalid `tool.setuptools.dynamic.{field}`: {directive!r}" + raise ValueError(msg) + return None + self._ensure_previously_set(dist, field) + return None + + def _obtain_version(self, dist: "Distribution", package_dir: Mapping[str, str]): + # Since plugins can set version, let's silently skip if it cannot be obtained + if "version" in self.dynamic and "version" in self.dynamic_cfg: + return _expand.version(self._obtain(dist, "version", package_dir)) + return None + + def _obtain_readme(self, dist: "Distribution") -> Optional[Dict[str, str]]: + if "readme" in self.dynamic: + dynamic_cfg = self.dynamic_cfg + return { + "text": self._obtain(dist, "readme", {}), + "content-type": dynamic_cfg["readme"].get("content-type", "text/x-rst"), + } + return None + + def _obtain_entry_points( + self, dist: "Distribution", package_dir: Mapping[str, str] + ) -> Optional[Dict[str, dict]]: + fields = ("entry-points", "scripts", "gui-scripts") + if not any(field in self.dynamic for field in fields): + return None + + text = self._obtain(dist, "entry-points", package_dir) + if text is None: + return None + + groups = _expand.entry_points(text) + expanded = {"entry-points": groups} + + def _set_scripts(field: str, group: str): + if group in groups: + value = groups.pop(group) + if field not in self.dynamic: + msg = _WouldIgnoreField.message(field, value) + warnings.warn(msg, _WouldIgnoreField) + # TODO: Don't set field when support for pyproject.toml stabilizes + # instead raise an error as specified in PEP 621 + expanded[field] = value + + _set_scripts("scripts", "console_scripts") + _set_scripts("gui-scripts", "gui_scripts") + + return expanded + + def _obtain_classifiers(self, dist: "Distribution"): + if "classifiers" in self.dynamic: + value = self._obtain(dist, "classifiers", {}) + if value: + return value.splitlines() + return None + + +@contextmanager +def _ignore_errors(ignore_option_errors: bool): + if not ignore_option_errors: + yield + return + + try: + yield + except Exception as ex: + _logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}") + + +class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered): + def __init__(self, distribution: "Distribution", setuptools_cfg: dict): + super().__init__(distribution) + self._setuptools_cfg = setuptools_cfg + + def __enter__(self): + """When entering the context, the values of ``packages``, ``py_modules`` and + ``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``. 
+ """ + dist, cfg = self._dist, self._setuptools_cfg + package_dir: Dict[str, str] = cfg.setdefault("package-dir", {}) + package_dir.update(dist.package_dir or {}) + dist.package_dir = package_dir # needs to be the same object + + dist.set_defaults._ignore_ext_modules() # pyproject.toml-specific behaviour + + # Set `py_modules` and `packages` in dist to short-circuit auto-discovery, + # but avoid overwriting empty lists purposefully set by users. + if dist.py_modules is None: + dist.py_modules = cfg.get("py-modules") + if dist.packages is None: + dist.packages = cfg.get("packages") + + return super().__enter__() + + def __exit__(self, exc_type, exc_value, traceback): + """When exiting the context, if values of ``packages``, ``py_modules`` and + ``package_dir`` are missing in ``setuptools_cfg``, copy from ``dist``. + """ + # If anything was discovered set them back, so they count in the final config. + self._setuptools_cfg.setdefault("packages", self._dist.packages) + self._setuptools_cfg.setdefault("py-modules", self._dist.py_modules) + return super().__exit__(exc_type, exc_value, traceback) + + +class _ExperimentalProjectMetadata(UserWarning): + """Explicitly inform users that `pyproject.toml` configuration is experimental""" + + +class _InvalidFile(UserWarning): + """Inform users that the given `pyproject.toml` is experimental: + !!\n\n + ############################ + # Invalid `pyproject.toml` # + ############################ + + Any configurations in `pyproject.toml` will be ignored. + Please note that future releases of setuptools will halt the build process + if an invalid file is given. + + To prevent setuptools from considering `pyproject.toml` please + DO NOT include the `[project]` or `[tool.setuptools]` tables in your file. + \n\n!! + """ + + @classmethod + def message(cls): + from inspect import cleandoc + msg = "\n".join(cls.__doc__.splitlines()[1:]) + return cleandoc(msg) diff --git a/setuptools/config.py b/setuptools/config/setupcfg.py index b4e968e5..d485a8bb 100644 --- a/setuptools/config.py +++ b/setuptools/config/setupcfg.py @@ -1,63 +1,40 @@ -import ast -import io +"""Load setuptools configuration from ``setup.cfg`` files""" import os -import sys import warnings import functools -import importlib from collections import defaultdict from functools import partial from functools import wraps -from glob import iglob -import contextlib +from typing import (TYPE_CHECKING, Callable, Any, Dict, Generic, Iterable, List, + Optional, Tuple, TypeVar, Union) from distutils.errors import DistutilsOptionError, DistutilsFileError from setuptools.extern.packaging.version import Version, InvalidVersion from setuptools.extern.packaging.specifiers import SpecifierSet +from . 
import expand -class StaticModule: - """ - Attempt to load the module by the name - """ - - def __init__(self, name): - spec = importlib.util.find_spec(name) - with open(spec.origin) as strm: - src = strm.read() - module = ast.parse(src) - vars(self).update(locals()) - del self.self - - def __getattr__(self, attr): - try: - return next( - ast.literal_eval(statement.value) - for statement in self.module.body - if isinstance(statement, ast.Assign) - for target in statement.targets - if isinstance(target, ast.Name) and target.id == attr - ) - except Exception as e: - raise AttributeError( - "{self.name} has no attribute {attr}".format(**locals()) - ) from e +if TYPE_CHECKING: + from setuptools.dist import Distribution # noqa + from distutils.dist import DistributionMetadata # noqa - -@contextlib.contextmanager -def patch_path(path): - """ - Add path to front of sys.path for the duration of the context. - """ - try: - sys.path.insert(0, path) - yield - finally: - sys.path.remove(path) +_Path = Union[str, os.PathLike] +SingleCommandOptions = Dict["str", Tuple["str", Any]] +"""Dict that associate the name of the options of a particular command to a +tuple. The first element of the tuple indicates the origin of the option value +(e.g. the name of the configuration file where it was read from), +while the second element of the tuple is the option value itself +""" +AllCommandOptions = Dict["str", SingleCommandOptions] # cmd name => its options +Target = TypeVar("Target", bound=Union["Distribution", "DistributionMetadata"]) -def read_configuration(filepath, find_others=False, ignore_option_errors=False): +def read_configuration( + filepath: _Path, + find_others=False, + ignore_option_errors=False +) -> dict: """Read given configuration file and returns options from it as a dict. :param str|unicode filepath: Path to configuration file @@ -73,7 +50,30 @@ def read_configuration(filepath, find_others=False, ignore_option_errors=False): :rtype: dict """ - from setuptools.dist import Distribution, _Distribution + from setuptools.dist import Distribution + + dist = Distribution() + filenames = dist.find_config_files() if find_others else [] + handlers = _apply(dist, filepath, filenames, ignore_option_errors) + return configuration_to_dict(handlers) + + +def apply_configuration(dist: "Distribution", filepath: _Path) -> "Distribution": + """Apply the configuration from a ``setup.cfg`` file into an existing + distribution object. 
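+
+    Usage sketch (the path below is purely illustrative)::
+
+        dist = apply_configuration(dist, "path/to/setup.cfg")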
+ """ + _apply(dist, filepath) + dist._finalize_requires() + return dist + + +def _apply( + dist: "Distribution", filepath: _Path, + other_files: Iterable[_Path] = (), + ignore_option_errors: bool = False, +) -> Tuple["ConfigHandler", ...]: + """Read configuration from ``filepath`` and applies to the ``dist`` object.""" + from setuptools.dist import _Distribution filepath = os.path.abspath(filepath) @@ -82,27 +82,21 @@ def read_configuration(filepath, find_others=False, ignore_option_errors=False): current_directory = os.getcwd() os.chdir(os.path.dirname(filepath)) + filenames = [*other_files, filepath] try: - dist = Distribution() - - filenames = dist.find_config_files() if find_others else [] - if filepath not in filenames: - filenames.append(filepath) - _Distribution.parse_config_files(dist, filenames=filenames) - handlers = parse_configuration( dist, dist.command_options, ignore_option_errors=ignore_option_errors ) - + dist._finalize_license_files() finally: os.chdir(current_directory) - return configuration_to_dict(handlers) + return handlers -def _get_option(target_obj, key): +def _get_option(target_obj: Target, key: str): """ Given a target object and option key, get that option from the target object, either through a get_{key} method or @@ -114,7 +108,7 @@ def _get_option(target_obj, key): return getter() -def configuration_to_dict(handlers): +def configuration_to_dict(handlers: Tuple["ConfigHandler", ...]) -> dict: """Returns configuration data gathered by given handlers as a dict. :param list[ConfigHandler] handlers: Handlers list, @@ -122,7 +116,7 @@ def configuration_to_dict(handlers): :rtype: dict """ - config_dict = defaultdict(dict) + config_dict: dict = defaultdict(dict) for handler in handlers: for option in handler.set_options: @@ -132,7 +126,11 @@ def configuration_to_dict(handlers): return config_dict -def parse_configuration(distribution, command_options, ignore_option_errors=False): +def parse_configuration( + distribution: "Distribution", + command_options: AllCommandOptions, + ignore_option_errors=False +) -> Tuple["ConfigMetadataHandler", "ConfigOptionsHandler"]: """Performs additional parsing of configuration options for a distribution. @@ -146,38 +144,55 @@ def parse_configuration(distribution, command_options, ignore_option_errors=Fals If False exceptions are propagated as expected. :rtype: list """ - options = ConfigOptionsHandler(distribution, command_options, ignore_option_errors) - options.parse() + with expand.EnsurePackagesDiscovered(distribution) as ensure_discovered: + options = ConfigOptionsHandler( + distribution, + command_options, + ignore_option_errors, + ensure_discovered, + ) - meta = ConfigMetadataHandler( - distribution.metadata, - command_options, - ignore_option_errors, - distribution.package_dir, - ) - meta.parse() + options.parse() + if not distribution.package_dir: + distribution.package_dir = options.package_dir # Filled by `find_packages` + + meta = ConfigMetadataHandler( + distribution.metadata, + command_options, + ignore_option_errors, + ensure_discovered, + distribution.package_dir, + distribution.src_root, + ) + meta.parse() return meta, options -class ConfigHandler: +class ConfigHandler(Generic[Target]): """Handles metadata supplied in configuration files.""" - section_prefix = None + section_prefix: str """Prefix for config sections handled by this handler. Must be provided by class heirs. """ - aliases = {} + aliases: Dict[str, str] = {} """Options aliases. For compatibility with various packages. E.g.: d2to1 and pbr. 
Note: `-` in keys is replaced with `_` by config parser. """ - def __init__(self, target_obj, options, ignore_option_errors=False): - sections = {} + def __init__( + self, + target_obj: Target, + options: AllCommandOptions, + ignore_option_errors, + ensure_discovered: expand.EnsurePackagesDiscovered, + ): + sections: AllCommandOptions = {} section_prefix = self.section_prefix for section_name, section_options in options.items(): @@ -190,7 +205,8 @@ class ConfigHandler: self.ignore_option_errors = ignore_option_errors self.target_obj = target_obj self.sections = sections - self.set_options = [] + self.set_options: List[str] = [] + self.ensure_discovered = ensure_discovered @property def parsers(self): @@ -258,34 +274,6 @@ class ConfigHandler: return [chunk.strip() for chunk in value if chunk.strip()] @classmethod - def _parse_list_glob(cls, value, separator=','): - """Equivalent to _parse_list() but expands any glob patterns using glob(). - - However, unlike with glob() calls, the results remain relative paths. - - :param value: - :param separator: List items separator character. - :rtype: list - """ - glob_characters = ('*', '?', '[', ']', '{', '}') - values = cls._parse_list(value, separator=separator) - expanded_values = [] - for value in values: - - # Has globby characters? - if any(char in value for char in glob_characters): - # then expand the glob pattern while keeping paths *relative*: - expanded_values.extend(sorted( - os.path.relpath(path, os.getcwd()) - for path in iglob(os.path.abspath(value)))) - - else: - # take the value as-is: - expanded_values.append(value) - - return expanded_values - - @classmethod def _parse_dict(cls, value): """Represents value as a dict. @@ -338,7 +326,7 @@ class ConfigHandler: return parser @classmethod - def _parse_file(cls, value): + def _parse_file(cls, value, root_dir: _Path): """Represents value as a string, allowing including text from nearest files using `file:` directive. @@ -360,25 +348,10 @@ class ConfigHandler: return value spec = value[len(include_directive) :] - filepaths = (os.path.abspath(path.strip()) for path in spec.split(',')) - return '\n'.join( - cls._read_file(path) - for path in filepaths - if (cls._assert_local(path) or True) and os.path.isfile(path) - ) + filepaths = (path.strip() for path in spec.split(',')) + return expand.read_files(filepaths, root_dir) - @staticmethod - def _assert_local(filepath): - if not filepath.startswith(os.getcwd()): - raise DistutilsOptionError('`file:` directive can not access %s' % filepath) - - @staticmethod - def _read_file(filepath): - with io.open(filepath, encoding='utf-8') as f: - return f.read() - - @classmethod - def _parse_attr(cls, value, package_dir=None): + def _parse_attr(self, value, package_dir, root_dir: _Path): """Represents value as a module attribute. 
Examples: @@ -392,36 +365,11 @@ class ConfigHandler: if not value.startswith(attr_directive): return value - attrs_path = value.replace(attr_directive, '').strip().split('.') - attr_name = attrs_path.pop() - - module_name = '.'.join(attrs_path) - module_name = module_name or '__init__' - - parent_path = os.getcwd() - if package_dir: - if attrs_path[0] in package_dir: - # A custom path was specified for the module we want to import - custom_path = package_dir[attrs_path[0]] - parts = custom_path.rsplit('/', 1) - if len(parts) > 1: - parent_path = os.path.join(os.getcwd(), parts[0]) - module_name = parts[1] - else: - module_name = custom_path - elif '' in package_dir: - # A custom parent directory was specified for all root modules - parent_path = os.path.join(os.getcwd(), package_dir['']) - - with patch_path(parent_path): - try: - # attempt to load value statically - return getattr(StaticModule(module_name), attr_name) - except Exception: - # fallback to simple import - module = importlib.import_module(module_name) + attr_desc = value.replace(attr_directive, '') - return getattr(module, attr_name) + # Make sure package_dir is populated correctly, so `attr:` directives can work + package_dir.update(self.ensure_discovered.package_dir) + return expand.read_attr(attr_desc, package_dir, root_dir) @classmethod def _get_parser_compound(cls, *parse_methods): @@ -482,7 +430,7 @@ class ConfigHandler: if section_name: # [section.option] variant method_postfix = '_%s' % section_name - section_parser_method = getattr( + section_parser_method: Optional[Callable] = getattr( self, # Dots in section names are translated into dunderscores. ('parse_section%s' % method_postfix).replace('.', '__'), @@ -513,7 +461,7 @@ class ConfigHandler: return config_handler -class ConfigMetadataHandler(ConfigHandler): +class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]): section_prefix = 'metadata' @@ -531,18 +479,23 @@ class ConfigMetadataHandler(ConfigHandler): """ def __init__( - self, target_obj, options, ignore_option_errors=False, package_dir=None + self, + target_obj: "DistributionMetadata", + options: AllCommandOptions, + ignore_option_errors: bool, + ensure_discovered: expand.EnsurePackagesDiscovered, + package_dir: Optional[dict] = None, + root_dir: _Path = os.curdir ): - super(ConfigMetadataHandler, self).__init__( - target_obj, options, ignore_option_errors - ) + super().__init__(target_obj, options, ignore_option_errors, ensure_discovered) self.package_dir = package_dir + self.root_dir = root_dir @property def parsers(self): """Metadata item name to parser function mapping.""" parse_list = self._parse_list - parse_file = self._parse_file + parse_file = partial(self._parse_file, root_dir=self.root_dir) parse_dict = self._parse_dict exclude_files_parser = self._exclude_files_parser @@ -579,7 +532,7 @@ class ConfigMetadataHandler(ConfigHandler): :rtype: str """ - version = self._parse_file(value) + version = self._parse_file(value, self.root_dir) if version != value: version = version.strip() @@ -596,24 +549,24 @@ class ConfigMetadataHandler(ConfigHandler): return version - version = self._parse_attr(value, self.package_dir) - - if callable(version): - version = version() - - if not isinstance(version, str): - if hasattr(version, '__iter__'): - version = '.'.join(map(str, version)) - else: - version = '%s' % version + return expand.version(self._parse_attr(value, self.package_dir, self.root_dir)) - return version - -class ConfigOptionsHandler(ConfigHandler): +class 
ConfigOptionsHandler(ConfigHandler["Distribution"]): section_prefix = 'options' + def __init__( + self, + target_obj: "Distribution", + options: AllCommandOptions, + ignore_option_errors: bool, + ensure_discovered: expand.EnsurePackagesDiscovered, + ): + super().__init__(target_obj, options, ignore_option_errors, ensure_discovered) + self.root_dir = target_obj.src_root + self.package_dir: Dict[str, str] = {} # To be filled by `find_packages` + @property def parsers(self): """Metadata item name to parser function mapping.""" @@ -622,6 +575,7 @@ class ConfigOptionsHandler(ConfigHandler): parse_bool = self._parse_bool parse_dict = self._parse_dict parse_cmdclass = self._parse_cmdclass + parse_file = partial(self._parse_file, root_dir=self.root_dir) return { 'zip_safe': parse_bool, @@ -635,23 +589,15 @@ class ConfigOptionsHandler(ConfigHandler): 'setup_requires': parse_list_semicolon, 'tests_require': parse_list_semicolon, 'packages': self._parse_packages, - 'entry_points': self._parse_file, + 'entry_points': parse_file, 'py_modules': parse_list, 'python_requires': SpecifierSet, 'cmdclass': parse_cmdclass, } def _parse_cmdclass(self, value): - def resolve_class(qualified_class_name): - idx = qualified_class_name.rfind('.') - class_name = qualified_class_name[idx + 1 :] - pkg_name = qualified_class_name[:idx] - - module = __import__(pkg_name) - - return getattr(module, class_name) - - return {k: resolve_class(v) for k, v in self._parse_dict(value).items()} + package_dir = self.ensure_discovered.package_dir + return expand.cmdclass(self._parse_dict(value), package_dir, self.root_dir) def _parse_packages(self, value): """Parses `packages` option value. @@ -665,19 +611,18 @@ class ConfigOptionsHandler(ConfigHandler): if trimmed_value not in find_directives: return self._parse_list(value) - findns = trimmed_value == find_directives[1] - # Read function arguments from a dedicated section. find_kwargs = self.parse_section_packages__find( self.sections.get('packages.find', {}) ) - if findns: - from setuptools import find_namespace_packages as find_packages - else: - from setuptools import find_packages + find_kwargs.update( + namespaces=(trimmed_value == find_directives[1]), + root_dir=self.root_dir, + fill_package_dir=self.package_dir, + ) - return find_packages(**find_kwargs) + return expand.find_packages(**find_kwargs) def parse_section_packages__find(self, section_options): """Parses `packages.find` configuration file section. @@ -709,14 +654,8 @@ class ConfigOptionsHandler(ConfigHandler): self['entry_points'] = parsed def _parse_package_data(self, section_options): - parsed = self._parse_section_to_dict(section_options, self._parse_list) - - root = parsed.get('*') - if root: - parsed[''] = root - del parsed['*'] - - return parsed + package_data = self._parse_section_to_dict(section_options, self._parse_list) + return expand.canonic_package_data(package_data) def parse_section_package_data(self, section_options): """Parses `package_data` configuration file section. @@ -738,14 +677,13 @@ class ConfigOptionsHandler(ConfigHandler): :param dict section_options: """ parse_list = partial(self._parse_list, separator=';') - self['extras_require'] = self._parse_section_to_dict( - section_options, parse_list - ) + parsed = self._parse_section_to_dict(section_options, parse_list) + self['extras_require'] = parsed def parse_section_data_files(self, section_options): """Parses `data_files` configuration file section. 
:param dict section_options:
        """
-        parsed = self._parse_section_to_dict(section_options, self._parse_list_glob)
-        self['data_files'] = [(k, v) for k, v in parsed.items()]
+        parsed = self._parse_section_to_dict(section_options, self._parse_list)
+        self['data_files'] = expand.canonic_data_files(parsed, self.root_dir)
diff --git a/setuptools/discovery.py b/setuptools/discovery.py
new file mode 100644
index 00000000..95c3c7f8
--- /dev/null
+++ b/setuptools/discovery.py
@@ -0,0 +1,588 @@
+"""Automatic discovery of Python modules and packages (for inclusion in the
+distribution) and other config values.
+
+For the purposes of this module, the following nomenclature is used:
+
+- "src-layout": a directory representing a Python project that contains a "src"
+  folder. Everything under the "src" folder is meant to be included in the
+  distribution when packaging the project. Example::
+
+    .
+    ├── tox.ini
+    ├── pyproject.toml
+    └── src/
+        └── mypkg/
+            ├── __init__.py
+            ├── mymodule.py
+            └── my_data_file.txt
+
+- "flat-layout": a Python project that does not use "src-layout" but instead
+  has a directory under the project root for each package::
+
+    .
+    ├── tox.ini
+    ├── pyproject.toml
+    └── mypkg/
+        ├── __init__.py
+        ├── mymodule.py
+        └── my_data_file.txt
+
+- "single-module": a project that contains a single Python script directly under
+  the project root (no directory used)::
+
+    .
+    ├── tox.ini
+    ├── pyproject.toml
+    └── mymodule.py
+
+"""
+
+import itertools
+import os
+from fnmatch import fnmatchcase
+from glob import glob
+from pathlib import Path
+from typing import TYPE_CHECKING
+from typing import Callable, Dict, Iterator, Iterable, List, Optional, Tuple, Union
+
+import _distutils_hack.override  # noqa: F401
+
+from distutils import log
+from distutils.util import convert_path
+
+_Path = Union[str, os.PathLike]
+_Filter = Callable[[str], bool]
+StrIter = Iterator[str]
+
+chain_iter = itertools.chain.from_iterable
+
+if TYPE_CHECKING:
+    from setuptools import Distribution  # noqa
+
+
+def _valid_name(path: _Path) -> bool:
+    # Ignore invalid names that cannot be imported directly
+    return os.path.basename(path).isidentifier()
+
+
+class _Finder:
+    """Base class that exposes functionality for module/package finders"""
+
+    ALWAYS_EXCLUDE: Tuple[str, ...] = ()
+    DEFAULT_EXCLUDE: Tuple[str, ...] = ()
+
+    @classmethod
+    def find(
+        cls,
+        where: _Path = '.',
+        exclude: Iterable[str] = (),
+        include: Iterable[str] = ('*',)
+    ) -> List[str]:
+        """Return a list of all Python items (packages or modules, depending on
+        the finder implementation) found within directory 'where'.
+
+        'where' is the root directory which will be searched.
+        It should be supplied as a "cross-platform" (i.e. URL-style) path;
+        it will be converted to the appropriate local path syntax.
+
+        'exclude' is a sequence of names to exclude; '*' can be used
+        as a wildcard in the names.
+        When finding packages, 'foo.*' will exclude all subpackages of 'foo'
+        (but not 'foo' itself).
+
+        'include' is a sequence of names to include.
+        If it's specified, only the named items will be included.
+        If it's not specified, all found items will be included.
+        'include' can contain shell style wildcard patterns just like
+        'exclude'.
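
The wildcard semantics described above reduce to ``fnmatchcase`` checks; a tiny
standalone illustration mirroring the ``_build_filter`` helper defined just
below (sketch only)::

    from fnmatch import fnmatchcase

    def build_filter(*patterns):
        return lambda name: any(fnmatchcase(name, pat) for pat in patterns)

    exclude = build_filter("foo.*")
    assert exclude("foo.bar")    # subpackages of 'foo' are matched...
    assert not exclude("foo")    # ...but 'foo' itself is not
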
+        """
+
+        exclude = exclude or cls.DEFAULT_EXCLUDE
+        return list(
+            cls._find_iter(
+                convert_path(str(where)),
+                cls._build_filter(*cls.ALWAYS_EXCLUDE, *exclude),
+                cls._build_filter(*include),
+            )
+        )
+
+    @classmethod
+    def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter:
+        raise NotImplementedError
+
+    @staticmethod
+    def _build_filter(*patterns: str) -> _Filter:
+        """
+        Given a list of patterns, return a callable that will be true only if
+        the input matches at least one of the patterns.
+        """
+        return lambda name: any(fnmatchcase(name, pat) for pat in patterns)
+
+
+class PackageFinder(_Finder):
+    """
+    Generate a list of all Python packages found within a directory
+    """
+
+    ALWAYS_EXCLUDE = ("ez_setup", "*__pycache__")
+
+    @classmethod
+    def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter:
+        """
+        All the packages found in 'where' that pass the 'include' filter, but
+        not the 'exclude' filter.
+        """
+        for root, dirs, files in os.walk(str(where), followlinks=True):
+            # Copy dirs to iterate over it, then empty dirs.
+            all_dirs = dirs[:]
+            dirs[:] = []
+
+            for dir in all_dirs:
+                full_path = os.path.join(root, dir)
+                rel_path = os.path.relpath(full_path, where)
+                package = rel_path.replace(os.path.sep, '.')
+
+                # Skip directory trees that are not valid packages
+                if '.' in dir or not cls._looks_like_package(full_path, package):
+                    continue
+
+                # Should this package be included?
+                if include(package) and not exclude(package):
+                    yield package
+
+                # Keep searching subdirectories, as there may be more packages
+                # down there, even if the parent was excluded.
+                dirs.append(dir)
+
+    @staticmethod
+    def _looks_like_package(path: _Path, _package_name: str) -> bool:
+        """Does a directory look like a package?"""
+        return os.path.isfile(os.path.join(path, '__init__.py'))
+
+
+class PEP420PackageFinder(PackageFinder):
+    @staticmethod
+    def _looks_like_package(_path: _Path, _package_name: str) -> bool:
+        return True
+
+
+class ModuleFinder(_Finder):
+    """Find isolated Python modules.
+    This finder will **not** recurse through subdirectories.
+    """
+
+    @classmethod
+    def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter:
+        for file in glob(os.path.join(where, "*.py")):
+            module, _ext = os.path.splitext(os.path.basename(file))
+
+            if not cls._looks_like_module(module):
+                continue
+
+            if include(module) and not exclude(module):
+                yield module
+
+    _looks_like_module = staticmethod(_valid_name)
+
+
+# We have to be extra careful in the case of flat layout to not include files
+# and directories not meant for distribution (e.g.
tool-related)


class FlatLayoutPackageFinder(PEP420PackageFinder):
    _EXCLUDE = (
        "ci",
        "bin",
        "doc",
        "docs",
        "documentation",
        "manpages",
        "news",
        "changelog",
        "test",
        "tests",
        "unit_test",
        "unit_tests",
        "example",
        "examples",
        "scripts",
        "tools",
        "util",
        "utils",
        "python",
        "build",
        "dist",
        "venv",
        "env",
        "requirements",
        # ---- Task runners / Build tools ----
        "tasks",  # invoke
        "fabfile",  # fabric
        "site_scons",  # SCons
        # ---- Other tools ----
        "benchmark",
        "benchmarks",
        "exercise",
        "exercises",
        # ---- Hidden directories/Private packages ----
        "[._]*",
    )

    DEFAULT_EXCLUDE = tuple(chain_iter((p, f"{p}.*") for p in _EXCLUDE))
    """Reserved package names"""

    @staticmethod
    def _looks_like_package(_path: _Path, package_name: str) -> bool:
        names = package_name.split('.')
        # Consider PEP 561
        root_pkg_is_valid = names[0].isidentifier() or names[0].endswith("-stubs")
        return root_pkg_is_valid and all(name.isidentifier() for name in names[1:])


class FlatLayoutModuleFinder(ModuleFinder):
    DEFAULT_EXCLUDE = (
        "setup",
        "conftest",
        "test",
        "tests",
        "example",
        "examples",
        "build",
        # ---- Task runners ----
        "toxfile",
        "noxfile",
        "pavement",
        "dodo",
        "tasks",
        "fabfile",
        # ---- Other tools ----
        "[Ss][Cc]onstruct",  # SCons
        "conanfile",  # Conan: C/C++ build tool
        "manage",  # Django
        "benchmark",
        "benchmarks",
        "exercise",
        "exercises",
        # ---- Hidden files/Private modules ----
        "[._]*",
    )
    """Reserved top-level module names"""


def _find_packages_within(root_pkg: str, pkg_dir: _Path) -> List[str]:
    nested = PEP420PackageFinder.find(pkg_dir)
    return [root_pkg] + [".".join((root_pkg, n)) for n in nested]


class ConfigDiscovery:
    """Fill in metadata and options that can be automatically derived
    (from other metadata/options, the file system or conventions)
    """

    def __init__(self, distribution: "Distribution"):
        self.dist = distribution
        self._called = False
        self._disabled = False
        self._skip_ext_modules = False

    def _disable(self):
        """Internal API to disable automatic discovery"""
        self._disabled = True

    def _ignore_ext_modules(self):
        """Internal API to disregard ext_modules.

        Normally auto-discovery would not be triggered if ``ext_modules`` are set
        (this is done for backward compatibility with existing packages relying on
        ``setup.py`` or ``setup.cfg``). However, ``setuptools`` can call this function
        to ignore given ``ext_modules`` and proceed with the auto-discovery if
        ``packages`` and ``py_modules`` are not given (e.g. when using pyproject.toml
        metadata).
        """
        self._skip_ext_modules = True

    @property
    def _root_dir(self) -> _Path:
        # It is best to wait until `src_root` is set in dist before using _root_dir.
        return self.dist.src_root or os.curdir

    @property
    def _package_dir(self) -> Dict[str, str]:
        if self.dist.package_dir is None:
            return {}
        return self.dist.package_dir

    def __call__(self, force=False, name=True, ignore_ext_modules=False):
        """Automatically discover missing configuration fields
        and modify the given ``distribution`` object in-place.

        Note that by default this will only have an effect the first time the
        ``ConfigDiscovery`` object is called.

        To repeatedly invoke automatic discovery (e.g. when the project
        directory changes), please use ``force=True`` (or create a new
        ``ConfigDiscovery`` instance).
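
A sketch of the intended call pattern (editor's illustration of an internal
API; ``set_defaults`` is wired up as a ``ConfigDiscovery`` instance in the
``setuptools/dist.py`` hunk later in this diff)::

    from setuptools.dist import Distribution

    dist = Distribution({"src_root": "."})
    dist.set_defaults()             # first call performs the discovery
    dist.set_defaults()             # later calls are no-ops...
    dist.set_defaults(force=True)   # ...unless explicitly forced
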
+ """ + if force is False and (self._called or self._disabled): + # Avoid overhead of multiple calls + return + + self._analyse_package_layout(ignore_ext_modules) + if name: + self.analyse_name() # depends on ``packages`` and ``py_modules`` + + self._called = True + + def _explicitly_specified(self, ignore_ext_modules: bool) -> bool: + """``True`` if the user has specified some form of package/module listing""" + ignore_ext_modules = ignore_ext_modules or self._skip_ext_modules + ext_modules = not (self.dist.ext_modules is None or ignore_ext_modules) + return ( + self.dist.packages is not None + or self.dist.py_modules is not None + or ext_modules + or hasattr(self.dist, "configuration") and self.dist.configuration + # ^ Some projects use numpy.distutils.misc_util.Configuration + ) + + def _analyse_package_layout(self, ignore_ext_modules: bool) -> bool: + if self._explicitly_specified(ignore_ext_modules): + # For backward compatibility, just try to find modules/packages + # when nothing is given + return True + + log.debug( + "No `packages` or `py_modules` configuration, performing " + "automatic discovery." + ) + + return ( + self._analyse_explicit_layout() + or self._analyse_src_layout() + # flat-layout is the trickiest for discovery so it should be last + or self._analyse_flat_layout() + ) + + def _analyse_explicit_layout(self) -> bool: + """The user can explicitly give a package layout via ``package_dir``""" + package_dir = self._package_dir.copy() # don't modify directly + package_dir.pop("", None) # This falls under the "src-layout" umbrella + root_dir = self._root_dir + + if not package_dir: + return False + + log.debug(f"`explicit-layout` detected -- analysing {package_dir}") + pkgs = chain_iter( + _find_packages_within(pkg, os.path.join(root_dir, parent_dir)) + for pkg, parent_dir in package_dir.items() + ) + self.dist.packages = list(pkgs) + log.debug(f"discovered packages -- {self.dist.packages}") + return True + + def _analyse_src_layout(self) -> bool: + """Try to find all packages or modules under the ``src`` directory + (or anything pointed by ``package_dir[""]``). + + The "src-layout" is relatively safe for automatic discovery. + We assume that everything within is meant to be included in the + distribution. + + If ``package_dir[""]`` is not given, but the ``src`` directory exists, + this function will set ``package_dir[""] = "src"``. + """ + package_dir = self._package_dir + src_dir = os.path.join(self._root_dir, package_dir.get("", "src")) + if not os.path.isdir(src_dir): + return False + + log.debug(f"`src-layout` detected -- analysing {src_dir}") + package_dir.setdefault("", os.path.basename(src_dir)) + self.dist.package_dir = package_dir # persist eventual modifications + self.dist.packages = PEP420PackageFinder.find(src_dir) + self.dist.py_modules = ModuleFinder.find(src_dir) + log.debug(f"discovered packages -- {self.dist.packages}") + log.debug(f"discovered py_modules -- {self.dist.py_modules}") + return True + + def _analyse_flat_layout(self) -> bool: + """Try to find all packages and modules under the project root. + + Since the ``flat-layout`` is more dangerous in terms of accidentally including + extra files/directories, this function is more conservative and will raise an + error if multiple packages or modules are found. + + This assumes that multi-package dists are uncommon and refuse to support that + use case in order to be able to prevent unintended errors. 
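
For instance, a hypothetical flat checkout containing both
``alpha/__init__.py`` and ``beta/__init__.py`` (with no explicit
``packages``/``py_modules``) would be refused; roughly, a caller would
observe (sketch, path hypothetical)::

    from setuptools.dist import Distribution
    from setuptools.errors import PackageDiscoveryError  # added later in this diff

    dist = Distribution({"src_root": "path/to/flat/checkout"})
    try:
        dist.set_defaults()
    except PackageDiscoveryError as err:
        print(err)  # "Multiple top-level packages discovered in a flat-layout: ..."
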
+ """ + log.debug(f"`flat-layout` detected -- analysing {self._root_dir}") + return self._analyse_flat_packages() or self._analyse_flat_modules() + + def _analyse_flat_packages(self) -> bool: + self.dist.packages = FlatLayoutPackageFinder.find(self._root_dir) + top_level = remove_nested_packages(remove_stubs(self.dist.packages)) + log.debug(f"discovered packages -- {self.dist.packages}") + self._ensure_no_accidental_inclusion(top_level, "packages") + return bool(top_level) + + def _analyse_flat_modules(self) -> bool: + self.dist.py_modules = FlatLayoutModuleFinder.find(self._root_dir) + log.debug(f"discovered py_modules -- {self.dist.py_modules}") + self._ensure_no_accidental_inclusion(self.dist.py_modules, "modules") + return bool(self.dist.py_modules) + + def _ensure_no_accidental_inclusion(self, detected: List[str], kind: str): + if len(detected) > 1: + from inspect import cleandoc + from setuptools.errors import PackageDiscoveryError + + msg = f"""Multiple top-level {kind} discovered in a flat-layout: {detected}. + + To avoid accidental inclusion of unwanted files or directories, + setuptools will not proceed with this build. + + If you are trying to create a single distribution with multiple {kind} + on purpose, you should not rely on automatic discovery. + Instead, consider the following options: + + 1. set up custom discovery (`find` directive with `include` or `exclude`) + 2. use a `src-layout` + 3. explicitly set `py_modules` or `packages` with a list of names + + To find more information, look for "package discovery" on setuptools docs. + """ + raise PackageDiscoveryError(cleandoc(msg)) + + def analyse_name(self): + """The packages/modules are the essential contribution of the author. + Therefore the name of the distribution can be derived from them. + """ + if self.dist.metadata.name or self.dist.name: + # get_name() is not reliable (can return "UNKNOWN") + return None + + log.debug("No `name` configuration, performing automatic discovery") + + name = ( + self._find_name_single_package_or_module() + or self._find_name_from_packages() + ) + if name: + self.dist.metadata.name = name + self.dist.name = name + + def _find_name_single_package_or_module(self) -> Optional[str]: + """Exactly one module or package""" + for field in ('packages', 'py_modules'): + items = getattr(self.dist, field, None) or [] + if items and len(items) == 1: + log.debug(f"Single module/package detected, name: {items[0]}") + return items[0] + + return None + + def _find_name_from_packages(self) -> Optional[str]: + """Try to find the root package that is not a PEP 420 namespace""" + if not self.dist.packages: + return None + + packages = remove_stubs(sorted(self.dist.packages, key=len)) + package_dir = self.dist.package_dir or {} + + parent_pkg = find_parent_package(packages, package_dir, self._root_dir) + if parent_pkg: + log.debug(f"Common parent package detected, name: {parent_pkg}") + return parent_pkg + + log.warn("No parent package detected, impossible to derive `name`") + return None + + +def remove_nested_packages(packages: List[str]) -> List[str]: + """Remove nested packages from a list of packages. 
+ + >>> remove_nested_packages(["a", "a.b1", "a.b2", "a.b1.c1"]) + ['a'] + >>> remove_nested_packages(["a", "b", "c.d", "c.d.e.f", "g.h", "a.a1"]) + ['a', 'b', 'c.d', 'g.h'] + """ + pkgs = sorted(packages, key=len) + top_level = pkgs[:] + size = len(pkgs) + for i, name in enumerate(reversed(pkgs)): + if any(name.startswith(f"{other}.") for other in top_level): + top_level.pop(size - i - 1) + + return top_level + + +def remove_stubs(packages: List[str]) -> List[str]: + """Remove type stubs (:pep:`561`) from a list of packages. + + >>> remove_stubs(["a", "a.b", "a-stubs", "a-stubs.b.c", "b", "c-stubs"]) + ['a', 'a.b', 'b'] + """ + return [pkg for pkg in packages if not pkg.split(".")[0].endswith("-stubs")] + + +def find_parent_package( + packages: List[str], package_dir: Dict[str, str], root_dir: _Path +) -> Optional[str]: + """Find the parent package that is not a namespace.""" + packages = sorted(packages, key=len) + common_ancestors = [] + for i, name in enumerate(packages): + if not all(n.startswith(f"{name}.") for n in packages[i+1:]): + # Since packages are sorted by length, this condition is able + # to find a list of all common ancestors. + # When there is divergence (e.g. multiple root packages) + # the list will be empty + break + common_ancestors.append(name) + + for name in common_ancestors: + pkg_path = find_package_path(name, package_dir, root_dir) + init = os.path.join(pkg_path, "__init__.py") + if os.path.isfile(init): + return name + + return None + + +def find_package_path(name: str, package_dir: Dict[str, str], root_dir: _Path) -> str: + """Given a package name, return the path where it should be found on + disk, considering the ``package_dir`` option. + + >>> path = find_package_path("my.pkg", {"": "root/is/nested"}, ".") + >>> path.replace(os.sep, "/") + './root/is/nested/my/pkg' + + >>> path = find_package_path("my.pkg", {"my": "root/is/nested"}, ".") + >>> path.replace(os.sep, "/") + './root/is/nested/pkg' + + >>> path = find_package_path("my.pkg", {"my.pkg": "root/is/nested"}, ".") + >>> path.replace(os.sep, "/") + './root/is/nested' + + >>> path = find_package_path("other.pkg", {"my.pkg": "root/is/nested"}, ".") + >>> path.replace(os.sep, "/") + './other/pkg' + """ + parts = name.split(".") + for i in range(len(parts), 0, -1): + # Look backwards, the most specific package_dir first + partial_name = ".".join(parts[:i]) + if partial_name in package_dir: + parent = package_dir[partial_name] + return os.path.join(root_dir, parent, *parts[i:]) + + parent = package_dir.get("") or "" + return os.path.join(root_dir, *parent.split("/"), *parts) + + +def construct_package_dir(packages: List[str], package_path: _Path) -> Dict[str, str]: + parent_pkgs = remove_nested_packages(packages) + prefix = Path(package_path).parts + return {pkg: "/".join([*prefix, *pkg.split(".")]) for pkg in parent_pkgs} diff --git a/setuptools/dist.py b/setuptools/dist.py index e825785e..215c88e3 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -19,6 +19,7 @@ from glob import iglob import itertools import textwrap from typing import List, Optional, TYPE_CHECKING +from pathlib import Path from collections import defaultdict from email import message_from_file @@ -28,7 +29,8 @@ from distutils.util import rfc822_escape from setuptools.extern import packaging from setuptools.extern import ordered_set -from setuptools.extern.more_itertools import unique_everseen, always_iterable +from setuptools.extern.more_itertools import unique_everseen, partition +from setuptools.extern import nspektr from 
._importlib import metadata @@ -38,9 +40,11 @@ import setuptools import setuptools.command from setuptools import windows_support from setuptools.monkey import get_unpatched -from setuptools.config import parse_configuration +from setuptools.config import setupcfg, pyprojecttoml +from setuptools.discovery import ConfigDiscovery + import pkg_resources -from setuptools.extern.packaging import version, requirements +from setuptools.extern.packaging import version from . import _reqs from . import _entry_points @@ -464,6 +468,13 @@ class Distribution(_Distribution): }, ) + # Save the original dependencies before they are processed into the egg format + self._orig_extras_require = {} + self._orig_install_requires = [] + self._tmp_extras_require = defaultdict(ordered_set.OrderedSet) + + self.set_defaults = ConfigDiscovery(self) + self._set_metadata_defaults(attrs) self.metadata.version = self._normalize_version( @@ -534,6 +545,8 @@ class Distribution(_Distribution): self.metadata.python_requires = self.python_requires if getattr(self, 'extras_require', None): + # Save original before it is messed by _convert_extras_requirements + self._orig_extras_require = self._orig_extras_require or self.extras_require for extra in self.extras_require.keys(): # Since this gets called multiple times at points where the # keys have become 'converted' extras, ensure that we are only @@ -542,6 +555,10 @@ class Distribution(_Distribution): if extra: self.metadata.provides_extras.add(extra) + if getattr(self, 'install_requires', None) and not self._orig_install_requires: + # Save original before it is messed by _move_install_requirements_markers + self._orig_install_requires = self.install_requires + self._convert_extras_requirements() self._move_install_requirements_markers() @@ -552,7 +569,8 @@ class Distribution(_Distribution): `"extra:{marker}": ["barbazquux"]`. """ spec_ext_reqs = getattr(self, 'extras_require', None) or {} - self._tmp_extras_require = defaultdict(list) + tmp = defaultdict(ordered_set.OrderedSet) + self._tmp_extras_require = getattr(self, '_tmp_extras_require', tmp) for section, v in spec_ext_reqs.items(): # Do not strip empty sections. self._tmp_extras_require[section] @@ -590,7 +608,8 @@ class Distribution(_Distribution): for r in complex_reqs: self._tmp_extras_require[':' + str(r.marker)].append(r) self.extras_require = dict( - (k, [str(r) for r in map(self._clean_req, v)]) + # list(dict.fromkeys(...)) ensures a list of unique strings + (k, list(dict.fromkeys(str(r) for r in map(self._clean_req, v)))) for k, v in self._tmp_extras_require.items() ) @@ -808,16 +827,32 @@ class Distribution(_Distribution): except ValueError as e: raise DistutilsOptionError(e) from e + def _get_project_config_files(self, filenames): + """Add default file and split between INI and TOML""" + tomlfiles = [] + standard_project_metadata = Path(self.src_root or os.curdir, "pyproject.toml") + if filenames is not None: + parts = partition(lambda f: Path(f).suffix == ".toml", filenames) + filenames = list(parts[0]) # 1st element => predicate is False + tomlfiles = list(parts[1]) # 2nd element => predicate is True + elif standard_project_metadata.exists(): + tomlfiles = [standard_project_metadata] + return filenames, tomlfiles + def parse_config_files(self, filenames=None, ignore_option_errors=False): """Parses configuration files from various levels and loads configuration. 
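
The ``_get_project_config_files`` helper above relies on
``more_itertools.partition`` (vendored as ``setuptools.extern.more_itertools``),
which yields the predicate-false items first; a quick standalone check of that
split (sketch; assumes a stand-alone ``more-itertools`` install)::

    from pathlib import Path
    from more_itertools import partition

    filenames = ["setup.cfg", "pyproject.toml", "extra.cfg"]
    inis, tomls = partition(lambda f: Path(f).suffix == ".toml", filenames)
    assert list(inis) == ["setup.cfg", "extra.cfg"]   # predicate False
    assert list(tomls) == ["pyproject.toml"]          # predicate True
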
- """ - self._parse_config_files(filenames=filenames) + inifiles, tomlfiles = self._get_project_config_files(filenames) + + self._parse_config_files(filenames=inifiles) - parse_configuration( + setupcfg.parse_configuration( self, self.command_options, ignore_option_errors=ignore_option_errors ) + for filename in tomlfiles: + pyprojecttoml.apply_configuration(self, filename, ignore_option_errors) + self._finalize_requires() self._finalize_license_files() @@ -876,25 +911,10 @@ class Distribution(_Distribution): Given an entry point, ensure that any declared extras for its distribution are installed. """ - reqs = { - req - for req in map(requirements.Requirement, always_iterable(ep.dist.requires)) - for extra in ep.extras - if extra in req.extras - } - missing = itertools.filterfalse(self._is_installed, reqs) - for req in missing: + for req in nspektr.missing(ep): # fetch_build_egg expects pkg_resources.Requirement self.fetch_build_egg(pkg_resources.Requirement(str(req))) - def _is_installed(self, req): - try: - dist = metadata.distribution(req.name) - except metadata.PackageNotFoundError: - return False - found_ver = packaging.version.Version(dist.version()) - return found_ver in req.specifier - def get_egg_cache_dir(self): egg_cache_dir = os.path.join(os.curdir, '.eggs') if not os.path.exists(egg_cache_dir): @@ -1186,6 +1206,13 @@ class Distribution(_Distribution): sys.stdout.detach(), encoding, errors, newline, line_buffering ) + def run_command(self, command): + self.set_defaults() + # Postpone defaults until all explicit configuration is considered + # (setup() args, config files, command line and plugins) + + super().run_command(command) + class DistDeprecationWarning(SetuptoolsDeprecationWarning): """Class for warning about deprecations in dist in diff --git a/setuptools/errors.py b/setuptools/errors.py index f4d35a63..ec7fb3b6 100644 --- a/setuptools/errors.py +++ b/setuptools/errors.py @@ -4,17 +4,6 @@ Provides exceptions used by setuptools modules. """ from distutils import errors as _distutils_errors -from distutils.errors import DistutilsError - - -class RemovedCommandError(DistutilsError, RuntimeError): - """Error used for commands that have been removed in setuptools. - - Since ``setuptools`` is built on ``distutils``, simply removing a command - from ``setuptools`` will make the behavior fall back to ``distutils``; this - error is raised if a command exists in ``distutils`` but has been actively - removed in ``setuptools``. - """ # Re-export errors from distutils to facilitate the migration to PEP632 @@ -38,3 +27,32 @@ UnknownFileError = _distutils_errors.UnknownFileError # The root error class in the hierarchy BaseError = _distutils_errors.DistutilsError + + +class RemovedCommandError(BaseError, RuntimeError): + """Error used for commands that have been removed in setuptools. + + Since ``setuptools`` is built on ``distutils``, simply removing a command + from ``setuptools`` will make the behavior fall back to ``distutils``; this + error is raised if a command exists in ``distutils`` but has been actively + removed in ``setuptools``. + """ + + +class PackageDiscoveryError(BaseError, RuntimeError): + """Impossible to perform automatic discovery of packages and/or modules. + + The current project layout or given discovery options can lead to problems when + scanning the project directory. + + Setuptools might also refuse to complete auto-discovery if an error prone condition + is detected (e.g. 
when a project is organised as a flat-layout but contains + multiple directories that can be taken as top-level packages inside a single + distribution [*]_). In these situations the users are encouraged to be explicit + about which packages to include or to make the discovery parameters more specific. + + .. [*] Since multi-package distributions are uncommon it is very likely that the + developers did not intend for all the directories to be packaged, and are just + leaving auxiliary code in the repository top-level, such as maintenance-related + scripts. + """ diff --git a/setuptools/extern/__init__.py b/setuptools/extern/__init__.py index 98235a4b..192e55f6 100644 --- a/setuptools/extern/__init__.py +++ b/setuptools/extern/__init__.py @@ -71,6 +71,6 @@ class VendorImporter: names = ( 'packaging', 'pyparsing', 'ordered_set', 'more_itertools', 'importlib_metadata', - 'zipp', 'importlib_resources', 'jaraco', 'typing_extensions', + 'zipp', 'importlib_resources', 'jaraco', 'typing_extensions', 'nspektr', 'tomli', ) VendorImporter(__name__, names, 'setuptools._vendor').install() diff --git a/setuptools/tests/config/__init__.py b/setuptools/tests/config/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/tests/config/__init__.py diff --git a/setuptools/tests/config/downloads/.gitignore b/setuptools/tests/config/downloads/.gitignore new file mode 100644 index 00000000..df3779fc --- /dev/null +++ b/setuptools/tests/config/downloads/.gitignore @@ -0,0 +1,4 @@ +* +!.gitignore +!__init__.py +!preload.py diff --git a/setuptools/tests/config/downloads/__init__.py b/setuptools/tests/config/downloads/__init__.py new file mode 100644 index 00000000..de43cffb --- /dev/null +++ b/setuptools/tests/config/downloads/__init__.py @@ -0,0 +1,51 @@ +import re +from pathlib import Path +from urllib.request import urlopen + +__all__ = ["DOWNLOAD_DIR", "retrieve_file", "output_file", "urls_from_file"] + + +NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/") +DOWNLOAD_DIR = Path(__file__).parent + + +# ---------------------------------------------------------------------- +# Please update ./preload.py accordingly when modifying this file +# ---------------------------------------------------------------------- + + +def output_file(url: str, download_dir: Path = DOWNLOAD_DIR): + file_name = url.strip() + for part in NAME_REMOVE: + file_name = file_name.replace(part, '').strip().strip('/:').strip() + return Path(download_dir, re.sub(r"[^\-_\.\w\d]+", "_", file_name)) + + +def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR): + path = output_file(url, download_dir) + if path.exists(): + print(f"Skipping {url} (already exists: {path})") + else: + download_dir.mkdir(exist_ok=True, parents=True) + print(f"Downloading {url} to {path}") + download(url, path) + return path + + +def urls_from_file(list_file: Path): + """``list_file`` should be a text file where each line corresponds to a URL to + download. 
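
A short usage sketch for these helpers (editor's illustration;
``setupcfg_examples.txt`` is the URL list added by this diff, ``output_file``
derives a deterministic cache path from a URL, and ``retrieve_file`` downloads
only on a cache miss)::

    from pathlib import Path
    from setuptools.tests.config.downloads import (
        output_file, retrieve_file, urls_from_file,
    )

    urls = urls_from_file(Path("setupcfg_examples.txt"))
    print(output_file(urls[0]))    # cache path derived from the URL
    path = retrieve_file(urls[0])  # downloads on first use, reuses afterwards
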
+ """ + print(f"file: {list_file}") + content = list_file.read_text(encoding="utf-8") + return [url for url in content.splitlines() if not url.startswith("#")] + + +def download(url: str, dest: Path): + with urlopen(url) as f: + data = f.read() + + with open(dest, "wb") as f: + f.write(data) + + assert Path(dest).exists() diff --git a/setuptools/tests/config/downloads/preload.py b/setuptools/tests/config/downloads/preload.py new file mode 100644 index 00000000..64b3f1c8 --- /dev/null +++ b/setuptools/tests/config/downloads/preload.py @@ -0,0 +1,18 @@ +"""This file can be used to preload files needed for testing. + +For example you can use:: + + cd setuptools/tests/config + python -m downloads.preload setupcfg_examples.txt + +to make sure the `setup.cfg` examples are downloaded before starting the tests. +""" +import sys +from pathlib import Path + +from . import retrieve_file, urls_from_file + + +if __name__ == "__main__": + urls = urls_from_file(Path(sys.argv[1])) + list(map(retrieve_file, urls)) diff --git a/setuptools/tests/config/setupcfg_examples.txt b/setuptools/tests/config/setupcfg_examples.txt new file mode 100644 index 00000000..5db35654 --- /dev/null +++ b/setuptools/tests/config/setupcfg_examples.txt @@ -0,0 +1,23 @@ +# ==================================================================== +# Some popular packages that use setup.cfg (and others not so popular) +# Reference: https://hugovk.github.io/top-pypi-packages/ +# ==================================================================== +https://github.com/pypa/setuptools/raw/52c990172fec37766b3566679724aa8bf70ae06d/setup.cfg +https://github.com/pypa/wheel/raw/0acd203cd896afec7f715aa2ff5980a403459a3b/setup.cfg +https://github.com/python/importlib_metadata/raw/2f05392ca980952a6960d82b2f2d2ea10aa53239/setup.cfg +https://github.com/jaraco/skeleton/raw/d9008b5c510cd6969127a6a2ab6f832edddef296/setup.cfg +https://github.com/jaraco/zipp/raw/700d3a96390e970b6b962823bfea78b4f7e1c537/setup.cfg +https://github.com/pallets/jinja/raw/7d72eb7fefb7dce065193967f31f805180508448/setup.cfg +https://github.com/tkem/cachetools/raw/2fd87a94b8d3861d80e9e4236cd480bfdd21c90d/setup.cfg +https://github.com/aio-libs/aiohttp/raw/5e0e6b7080f2408d5f1dd544c0e1cf88378b7b10/setup.cfg +https://github.com/pallets/flask/raw/9486b6cf57bd6a8a261f67091aca8ca78eeec1e3/setup.cfg +https://github.com/pallets/click/raw/6411f425fae545f42795665af4162006b36c5e4a/setup.cfg +https://github.com/sqlalchemy/sqlalchemy/raw/533f5718904b620be8d63f2474229945d6f8ba5d/setup.cfg +https://github.com/pytest-dev/pluggy/raw/461ef63291d13589c4e21aa182cd1529257e9a0a/setup.cfg +https://github.com/pytest-dev/pytest/raw/c7be96dae487edbd2f55b561b31b68afac1dabe6/setup.cfg +https://github.com/tqdm/tqdm/raw/fc69d5dcf578f7c7986fa76841a6b793f813df35/setup.cfg +https://github.com/platformdirs/platformdirs/raw/7b7852128dd6f07511b618d6edea35046bd0c6ff/setup.cfg +https://github.com/pandas-dev/pandas/raw/bc17343f934a33dc231c8c74be95d8365537c376/setup.cfg +https://github.com/django/django/raw/4e249d11a6e56ca8feb4b055b681cec457ef3a3d/setup.cfg +https://github.com/pyscaffold/pyscaffold/raw/de7aa5dc059fbd04307419c667cc4961bc9df4b8/setup.cfg +https://github.com/pypa/virtualenv/raw/f92eda6e3da26a4d28c2663ffb85c4960bdb990c/setup.cfg diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py new file mode 100644 index 00000000..045d7f40 --- /dev/null +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -0,0 +1,316 @@ +"""Make sure that applying the 
configuration from pyproject.toml is equivalent to +applying a similar configuration from setup.cfg + +To run these tests offline, please have a look on ``./downloads/preload.py`` +""" +import io +import re +import tarfile +from pathlib import Path +from unittest.mock import Mock +from zipfile import ZipFile + +import pytest +from ini2toml.api import Translator + +import setuptools # noqa ensure monkey patch to metadata +from setuptools.dist import Distribution +from setuptools.config import setupcfg, pyprojecttoml +from setuptools.config import expand +from setuptools.config._apply_pyprojecttoml import _WouldIgnoreField, _some_attrgetter +from setuptools.command.egg_info import write_requirements + +from .downloads import retrieve_file, urls_from_file + + +HERE = Path(__file__).parent +EXAMPLES_FILE = "setupcfg_examples.txt" + + +def makedist(path, **attrs): + return Distribution({"src_root": path, **attrs}) + + +@pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE)) +@pytest.mark.filterwarnings("ignore") +@pytest.mark.uses_network +def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path): + monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.0.1")) + setupcfg_example = retrieve_file(url) + pyproject_example = Path(tmp_path, "pyproject.toml") + toml_config = Translator().translate(setupcfg_example.read_text(), "setup.cfg") + pyproject_example.write_text(toml_config) + + dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example) + dist_cfg = setupcfg.apply_configuration(makedist(tmp_path), setupcfg_example) + + pkg_info_toml = core_metadata(dist_toml) + pkg_info_cfg = core_metadata(dist_cfg) + assert pkg_info_toml == pkg_info_cfg + + if any(getattr(d, "license_files", None) for d in (dist_toml, dist_cfg)): + assert set(dist_toml.license_files) == set(dist_cfg.license_files) + + if any(getattr(d, "entry_points", None) for d in (dist_toml, dist_cfg)): + print(dist_cfg.entry_points) + ep_toml = {(k, *sorted(i.replace(" ", "") for i in v)) + for k, v in dist_toml.entry_points.items()} + ep_cfg = {(k, *sorted(i.replace(" ", "") for i in v)) + for k, v in dist_cfg.entry_points.items()} + assert ep_toml == ep_cfg + + if any(getattr(d, "package_data", None) for d in (dist_toml, dist_cfg)): + pkg_data_toml = {(k, *sorted(v)) for k, v in dist_toml.package_data.items()} + pkg_data_cfg = {(k, *sorted(v)) for k, v in dist_cfg.package_data.items()} + assert pkg_data_toml == pkg_data_cfg + + if any(getattr(d, "data_files", None) for d in (dist_toml, dist_cfg)): + data_files_toml = {(k, *sorted(v)) for k, v in dist_toml.data_files} + data_files_cfg = {(k, *sorted(v)) for k, v in dist_cfg.data_files} + assert data_files_toml == data_files_cfg + + assert set(dist_toml.install_requires) == set(dist_cfg.install_requires) + if any(getattr(d, "extras_require", None) for d in (dist_toml, dist_cfg)): + if ( + "testing" in dist_toml.extras_require + and "testing" not in dist_cfg.extras_require + ): + # ini2toml can automatically convert `tests_require` to `testing` extra + dist_toml.extras_require.pop("testing") + extra_req_toml = {(k, *sorted(v)) for k, v in dist_toml.extras_require.items()} + extra_req_cfg = {(k, *sorted(v)) for k, v in dist_cfg.extras_require.items()} + assert extra_req_toml == extra_req_cfg + + +PEP621_EXAMPLE = """\ +[project] +name = "spam" +version = "2020.0.0" +description = "Lovely Spam! Wonderful Spam!" 
+readme = "README.rst" +requires-python = ">=3.8" +license = {file = "LICENSE.txt"} +keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"] +authors = [ + {email = "hi@pradyunsg.me"}, + {name = "Tzu-Ping Chung"} +] +maintainers = [ + {name = "Brett Cannon", email = "brett@python.org"} +] +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python" +] + +dependencies = [ + "httpx", + "gidgethub[httpx]>4.0.0", + "django>2.1; os_name != 'nt'", + "django>2.0; os_name == 'nt'" +] + +[project.optional-dependencies] +test = [ + "pytest < 5.0.0", + "pytest-cov[all]" +] + +[project.urls] +homepage = "http://example.com" +documentation = "http://readthedocs.org" +repository = "http://github.com" +changelog = "http://github.com/me/spam/blob/master/CHANGELOG.md" + +[project.scripts] +spam-cli = "spam:main_cli" + +[project.gui-scripts] +spam-gui = "spam:main_gui" + +[project.entry-points."spam.magical"] +tomatoes = "spam:main_tomatoes" +""" + +PEP621_EXAMPLE_SCRIPT = """ +def main_cli(): pass +def main_gui(): pass +def main_tomatoes(): pass +""" + + +def _pep621_example_project(tmp_path, readme="README.rst"): + pyproject = tmp_path / "pyproject.toml" + text = PEP621_EXAMPLE + replacements = {'readme = "README.rst"': f'readme = "{readme}"'} + for orig, subst in replacements.items(): + text = text.replace(orig, subst) + pyproject.write_text(text) + + (tmp_path / readme).write_text("hello world") + (tmp_path / "LICENSE.txt").write_text("--- LICENSE stub ---") + (tmp_path / "spam.py").write_text(PEP621_EXAMPLE_SCRIPT) + return pyproject + + +def test_pep621_example(tmp_path): + """Make sure the example in PEP 621 works""" + pyproject = _pep621_example_project(tmp_path) + dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + assert dist.metadata.license == "--- LICENSE stub ---" + assert set(dist.metadata.license_files) == {"LICENSE.txt"} + + +@pytest.mark.parametrize( + "readme, ctype", + [ + ("Readme.txt", "text/plain"), + ("readme.md", "text/markdown"), + ("text.rst", "text/x-rst"), + ] +) +def test_readme_content_type(tmp_path, readme, ctype): + pyproject = _pep621_example_project(tmp_path, readme) + dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + assert dist.metadata.long_description_content_type == ctype + + +def test_undefined_content_type(tmp_path): + pyproject = _pep621_example_project(tmp_path, "README.tex") + with pytest.raises(ValueError, match="Undefined content type for README.tex"): + pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + + +def test_no_explicit_content_type_for_missing_extension(tmp_path): + pyproject = _pep621_example_project(tmp_path, "README") + dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + assert dist.metadata.long_description_content_type is None + + +# TODO: After PEP 639 is accepted, we have to move the license-files +# to the `project` table instead of `tool.setuptools` +def test_license_and_license_files(tmp_path): + pyproject = _pep621_example_project(tmp_path, "README") + text = pyproject.read_text(encoding="utf-8") + + # Sanity-check + assert 'license = {file = "LICENSE.txt"}' in text + assert "[tool.setuptools]" not in text + + text += '\n[tool.setuptools]\nlicense-files = ["_FILE*"]\n' + pyproject.write_text(text, encoding="utf-8") + (tmp_path / "_FILE.txt").touch() + (tmp_path / "_FILE.rst").touch() + + # Would normally match the `license_files` glob patterns, but we want to exclude it + # by being explicit. 
On the other hand, its contents should be added to `license` + (tmp_path / "LICENSE.txt").write_text("LicenseRef-Proprietary\n", encoding="utf-8") + + dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"} + assert dist.metadata.license == "LicenseRef-Proprietary\n" + + +class TestPresetField: + def pyproject(self, tmp_path, dynamic, extra_content=""): + content = f"[project]\nname = 'proj'\ndynamic = {dynamic!r}\n" + if "version" not in dynamic: + content += "version = '42'\n" + file = tmp_path / "pyproject.toml" + file.write_text(content + extra_content, encoding="utf-8") + return file + + @pytest.mark.parametrize( + "attr, field, value", + [ + ("install_requires", "dependencies", ["six"]), + ("classifiers", "classifiers", ["Private :: Classifier"]), + ] + ) + def test_not_listed_in_dynamic(self, tmp_path, attr, field, value): + """For the time being we just warn if the user pre-set values (e.g. via + ``setup.py``) but do not include them in ``dynamic``. + """ + pyproject = self.pyproject(tmp_path, []) + dist = makedist(tmp_path, **{attr: value}) + msg = re.compile(f"defined outside of `pyproject.toml`:.*{field}", re.S) + with pytest.warns(_WouldIgnoreField, match=msg): + dist = pyprojecttoml.apply_configuration(dist, pyproject) + + # TODO: Once support for pyproject.toml config stabilizes attr should be None + dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist) + assert dist_value == value + + @pytest.mark.parametrize( + "attr, field, value", + [ + ("install_requires", "dependencies", []), + ("extras_require", "optional-dependencies", {}), + ("install_requires", "dependencies", ["six"]), + ("classifiers", "classifiers", ["Private :: Classifier"]), + ] + ) + def test_listed_in_dynamic(self, tmp_path, attr, field, value): + pyproject = self.pyproject(tmp_path, [field]) + dist = makedist(tmp_path, **{attr: value}) + dist = pyprojecttoml.apply_configuration(dist, pyproject) + dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist) + assert dist_value == value + + def test_optional_dependencies_dont_remove_env_markers(self, tmp_path): + """ + Internally setuptools converts dependencies with markers to "extras". + If ``install_requires`` is given by ``setup.py``, we have to ensure that + applying ``optional-dependencies`` does not overwrite the mandatory + dependencies with markers (see #3204). 
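
In miniature, the conversion this test relies on (a sketch of behaviour
implemented in ``setuptools/dist.py``; the requirement string is illustrative)::

    from setuptools.dist import Distribution

    install_req = ['importlib-resources>=3.0.0; python_version < "3.7"']
    dist = Distribution({"install_requires": install_req})

    # The marked requirement is moved into a pseudo-extra keyed by ':<marker>'
    assert ':python_version < "3.7"' in dist.extras_require
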
+ """ + # If setuptools replace its internal mechanism that uses `requires.txt` + # this test has to be rewritten to adapt accordingly + extra = "\n[project.optional-dependencies]\nfoo = ['bar>1']\n" + pyproject = self.pyproject(tmp_path, ["dependencies"], extra) + install_req = ['importlib-resources (>=3.0.0) ; python_version < "3.7"'] + dist = makedist(tmp_path, install_requires=install_req) + dist = pyprojecttoml.apply_configuration(dist, pyproject) + assert "foo" in dist.extras_require + assert ':python_version < "3.7"' in dist.extras_require + egg_info = dist.get_command_obj("egg_info") + write_requirements(egg_info, tmp_path, tmp_path / "requires.txt") + reqs = (tmp_path / "requires.txt").read_text(encoding="utf-8") + assert "importlib-resources" in reqs + assert "bar" in reqs + + +class TestMeta: + def test_example_file_in_sdist(self, setuptools_sdist): + """Meta test to ensure tests can run from sdist""" + with tarfile.open(setuptools_sdist) as tar: + assert any(name.endswith(EXAMPLES_FILE) for name in tar.getnames()) + + def test_example_file_not_in_wheel(self, setuptools_wheel): + """Meta test to ensure auxiliary test files are not in wheel""" + with ZipFile(setuptools_wheel) as zipfile: + assert not any(name.endswith(EXAMPLES_FILE) for name in zipfile.namelist()) + + +# --- Auxiliary Functions --- + + +def core_metadata(dist) -> str: + with io.StringIO() as buffer: + dist.metadata.write_pkg_file(buffer) + value = "\n".join(buffer.getvalue().strip().splitlines()) + + # ---- DIFF NORMALISATION ---- + # PEP 621 is very particular about author/maintainer metadata conversion, so skip + value = re.sub(r"^(Author|Maintainer)(-email)?:.*$", "", value, flags=re.M) + # May be redundant with Home-page + value = re.sub(r"^Project-URL: Homepage,.*$", "", value, flags=re.M) + # May be missing in original (relying on default) but backfilled in the TOML + value = re.sub(r"^Description-Content-Type:.*$", "", value, flags=re.M) + # ini2toml can automatically convert `tests_require` to `testing` extra + value = value.replace("Provides-Extra: testing\n", "") + # Remove empty lines + value = re.sub(r"^\s*$", "", value, flags=re.M) + value = re.sub(r"^\n", "", value, flags=re.M) + + return value diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py new file mode 100644 index 00000000..3a59edbb --- /dev/null +++ b/setuptools/tests/config/test_expand.py @@ -0,0 +1,155 @@ +import os + +import pytest + +from distutils.errors import DistutilsOptionError +from setuptools.config import expand +from setuptools.discovery import find_package_path + + +def write_files(files, root_dir): + for file, content in files.items(): + path = root_dir / file + path.parent.mkdir(exist_ok=True, parents=True) + path.write_text(content) + + +def test_glob_relative(tmp_path, monkeypatch): + files = { + "dir1/dir2/dir3/file1.txt", + "dir1/dir2/file2.txt", + "dir1/file3.txt", + "a.ini", + "b.ini", + "dir1/c.ini", + "dir1/dir2/a.ini", + } + + write_files({k: "" for k in files}, tmp_path) + patterns = ["**/*.txt", "[ab].*", "**/[ac].ini"] + monkeypatch.chdir(tmp_path) + assert set(expand.glob_relative(patterns)) == files + # Make sure the same APIs work outside cwd + assert set(expand.glob_relative(patterns, tmp_path)) == files + + +def test_read_files(tmp_path, monkeypatch): + + dir_ = tmp_path / "dir_" + (tmp_path / "_dir").mkdir(exist_ok=True) + (tmp_path / "a.txt").touch() + files = { + "a.txt": "a", + "dir1/b.txt": "b", + "dir1/dir2/c.txt": "c" + } + write_files(files, dir_) + + with 
monkeypatch.context() as m: + m.chdir(dir_) + assert expand.read_files(list(files)) == "a\nb\nc" + + cannot_access_msg = r"Cannot access '.*\.\..a\.txt'" + with pytest.raises(DistutilsOptionError, match=cannot_access_msg): + expand.read_files(["../a.txt"]) + + # Make sure the same APIs work outside cwd + assert expand.read_files(list(files), dir_) == "a\nb\nc" + with pytest.raises(DistutilsOptionError, match=cannot_access_msg): + expand.read_files(["../a.txt"], dir_) + + +class TestReadAttr: + def test_read_attr(self, tmp_path, monkeypatch): + files = { + "pkg/__init__.py": "", + "pkg/sub/__init__.py": "VERSION = '0.1.1'", + "pkg/sub/mod.py": ( + "VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\n" + "raise SystemExit(1)" + ), + } + write_files(files, tmp_path) + + with monkeypatch.context() as m: + m.chdir(tmp_path) + # Make sure it can read the attr statically without evaluating the module + assert expand.read_attr('pkg.sub.VERSION') == '0.1.1' + values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}) + + assert values['a'] == 0 + assert values['b'] == {42} + + # Make sure the same APIs work outside cwd + assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1' + values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}, tmp_path) + assert values['c'] == (0, 1, 1) + + def test_import_order(self, tmp_path): + """ + Sometimes the import machinery will import the parent package of a nested + module, which triggers side-effects and might create problems (see issue #3176) + + ``read_attr`` should bypass these limitations by resolving modules statically + (via ast.literal_eval). + """ + files = { + "src/pkg/__init__.py": "from .main import func\nfrom .about import version", + "src/pkg/main.py": "import super_complicated_dep\ndef func(): return 42", + "src/pkg/about.py": "version = '42'", + } + write_files(files, tmp_path) + attr_desc = "pkg.about.version" + package_dir = {"": "src"} + # `import super_complicated_dep` should not run, otherwise the build fails + assert expand.read_attr(attr_desc, package_dir, tmp_path) == "42" + + +@pytest.mark.parametrize( + 'package_dir, file, module, return_value', + [ + ({"": "src"}, "src/pkg/main.py", "pkg.main", 42), + ({"pkg": "lib"}, "lib/main.py", "pkg.main", 13), + ({}, "single_module.py", "single_module", 70), + ({}, "flat_layout/pkg.py", "flat_layout.pkg", 836), + ] +) +def test_resolve_class(tmp_path, package_dir, file, module, return_value): + files = {file: f"class Custom:\n def testing(self): return {return_value}"} + write_files(files, tmp_path) + cls = expand.resolve_class(f"{module}.Custom", package_dir, tmp_path) + assert cls().testing() == return_value + + +@pytest.mark.parametrize( + 'args, pkgs', + [ + ({"where": ["."], "namespaces": False}, {"pkg", "other"}), + ({"where": [".", "dir1"], "namespaces": False}, {"pkg", "other", "dir2"}), + ({"namespaces": True}, {"pkg", "other", "dir1", "dir1.dir2"}), + ({}, {"pkg", "other", "dir1", "dir1.dir2"}), # default value for `namespaces` + ] +) +def test_find_packages(tmp_path, monkeypatch, args, pkgs): + files = { + "pkg/__init__.py", + "other/__init__.py", + "dir1/dir2/__init__.py", + } + write_files({k: "" for k in files}, tmp_path) + + package_dir = {} + kwargs = {"root_dir": tmp_path, "fill_package_dir": package_dir, **args} + where = kwargs.get("where", ["."]) + assert set(expand.find_packages(**kwargs)) == pkgs + for pkg in pkgs: + pkg_path = find_package_path(pkg, package_dir, tmp_path) + assert os.path.exists(pkg_path) + + # Make sure the same APIs work outside cwd 
+ where = [ + str((tmp_path / p).resolve()).replace(os.sep, "/") # ensure posix-style paths + for p in args.pop("where", ["."]) + ] + + assert set(expand.find_packages(where=where, **args)) == pkgs diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py new file mode 100644 index 00000000..4c237014 --- /dev/null +++ b/setuptools/tests/config/test_pyprojecttoml.py @@ -0,0 +1,401 @@ +import logging +import re +from configparser import ConfigParser +from inspect import cleandoc + +import pytest +import tomli_w +from path import Path as _Path + +from setuptools.config._apply_pyprojecttoml import _WouldIgnoreField +from setuptools.config.pyprojecttoml import ( + read_configuration, + expand_configuration, + apply_configuration, + validate, + _InvalidFile, +) +from setuptools.dist import Distribution +from setuptools.errors import OptionError + + +import setuptools # noqa -- force distutils.core to be patched +import distutils.core + +EXAMPLE = """ +[project] +name = "myproj" +keywords = ["some", "key", "words"] +dynamic = ["version", "readme"] +requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +dependencies = [ + 'importlib-metadata>=0.12;python_version<"3.8"', + 'importlib-resources>=1.0;python_version<"3.7"', + 'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"', +] + +[project.optional-dependencies] +docs = [ + "sphinx>=3", + "sphinx-argparse>=0.2.5", + "sphinx-rtd-theme>=0.4.3", +] +testing = [ + "pytest>=1", + "coverage>=3,<5", +] + +[project.scripts] +exec = "pkg.__main__:exec" + +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools] +package-dir = {"" = "src"} +zip-safe = true +platforms = ["any"] + +[tool.setuptools.packages.find] +where = ["src"] + +[tool.setuptools.cmdclass] +sdist = "pkg.mod.CustomSdist" + +[tool.setuptools.dynamic.version] +attr = "pkg.__version__.VERSION" + +[tool.setuptools.dynamic.readme] +file = ["README.md"] +content-type = "text/markdown" + +[tool.setuptools.package-data] +"*" = ["*.txt"] + +[tool.setuptools.data-files] +"data" = ["_files/*.txt"] + +[tool.distutils.sdist] +formats = "gztar" + +[tool.distutils.bdist_wheel] +universal = true +""" + + +def create_example(path, pkg_root): + pyproject = path / "pyproject.toml" + + files = [ + f"{pkg_root}/pkg/__init__.py", + "_files/file.txt", + ] + if pkg_root != ".": # flat-layout will raise error for multi-package dist + # Ensure namespaces are discovered + files.append(f"{pkg_root}/other/nested/__init__.py") + + for file in files: + (path / file).parent.mkdir(exist_ok=True, parents=True) + (path / file).touch() + + pyproject.write_text(EXAMPLE) + (path / "README.md").write_text("hello world") + (path / f"{pkg_root}/pkg/mod.py").write_text("class CustomSdist: pass") + (path / f"{pkg_root}/pkg/__version__.py").write_text("VERSION = (3, 10)") + (path / f"{pkg_root}/pkg/__main__.py").write_text("def exec(): print('hello')") + + +def verify_example(config, path, pkg_root): + pyproject = path / "pyproject.toml" + pyproject.write_text(tomli_w.dumps(config), encoding="utf-8") + expanded = expand_configuration(config, path) + expanded_project = expanded["project"] + assert read_configuration(pyproject, expand=True) == expanded + assert expanded_project["version"] == "3.10" + assert expanded_project["readme"]["text"] == "hello world" + assert "packages" in expanded["tool"]["setuptools"] + if pkg_root == ".": + # Auto-discovery will raise error for multi-package dist + 
assert set(expanded["tool"]["setuptools"]["packages"]) == {"pkg"} + else: + assert set(expanded["tool"]["setuptools"]["packages"]) == { + "pkg", + "other", + "other.nested", + } + assert expanded["tool"]["setuptools"]["include-package-data"] is True + assert "" in expanded["tool"]["setuptools"]["package-data"] + assert "*" not in expanded["tool"]["setuptools"]["package-data"] + assert expanded["tool"]["setuptools"]["data-files"] == [ + ("data", ["_files/file.txt"]) + ] + + +def test_read_configuration(tmp_path): + create_example(tmp_path, "src") + pyproject = tmp_path / "pyproject.toml" + + config = read_configuration(pyproject, expand=False) + assert config["project"].get("version") is None + assert config["project"].get("readme") is None + + verify_example(config, tmp_path, "src") + + +@pytest.mark.parametrize( + "pkg_root, opts", + [ + (".", {}), + ("src", {}), + ("lib", {"packages": {"find": {"where": ["lib"]}}}), + ], +) +def test_discovered_package_dir_with_attr_directive_in_config(tmp_path, pkg_root, opts): + create_example(tmp_path, pkg_root) + + pyproject = tmp_path / "pyproject.toml" + + config = read_configuration(pyproject, expand=False) + assert config["project"].get("version") is None + assert config["project"].get("readme") is None + config["tool"]["setuptools"].pop("packages", None) + config["tool"]["setuptools"].pop("package-dir", None) + + config["tool"]["setuptools"].update(opts) + verify_example(config, tmp_path, pkg_root) + + +ENTRY_POINTS = { + "console_scripts": {"a": "mod.a:func"}, + "gui_scripts": {"b": "mod.b:func"}, + "other": {"c": "mod.c:func [extra]"}, +} + + +class TestEntryPoints: + def write_entry_points(self, tmp_path): + entry_points = ConfigParser() + entry_points.read_dict(ENTRY_POINTS) + with open(tmp_path / "entry-points.txt", "w") as f: + entry_points.write(f) + + def pyproject(self, dynamic=None): + project = {"dynamic": dynamic or ["scripts", "gui-scripts", "entry-points"]} + tool = {"dynamic": {"entry-points": {"file": "entry-points.txt"}}} + return {"project": project, "tool": {"setuptools": tool}} + + def test_all_listed_in_dynamic(self, tmp_path): + self.write_entry_points(tmp_path) + expanded = expand_configuration(self.pyproject(), tmp_path) + expanded_project = expanded["project"] + assert len(expanded_project["scripts"]) == 1 + assert expanded_project["scripts"]["a"] == "mod.a:func" + assert len(expanded_project["gui-scripts"]) == 1 + assert expanded_project["gui-scripts"]["b"] == "mod.b:func" + assert len(expanded_project["entry-points"]) == 1 + assert expanded_project["entry-points"]["other"]["c"] == "mod.c:func [extra]" + + @pytest.mark.parametrize("missing_dynamic", ("scripts", "gui-scripts")) + def test_scripts_not_listed_in_dynamic(self, tmp_path, missing_dynamic): + self.write_entry_points(tmp_path) + dynamic = {"scripts", "gui-scripts", "entry-points"} - {missing_dynamic} + + msg = f"defined outside of `pyproject.toml`:.*{missing_dynamic}" + with pytest.warns(_WouldIgnoreField, match=re.compile(msg, re.S)): + expanded = expand_configuration(self.pyproject(dynamic), tmp_path) + + expanded_project = expanded["project"] + assert dynamic < set(expanded_project) + assert len(expanded_project["entry-points"]) == 1 + # TODO: Test the following when pyproject.toml support stabilizes: + # >>> assert missing_dynamic not in expanded_project + + +class TestClassifiers: + def test_dynamic(self, tmp_path): + # Let's create a project example that has dynamic classifiers + # coming from a txt file. 
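+        # (the txt file is wired in below by adding "classifiers" to
+        # `project.dynamic` and pointing `tool.setuptools.dynamic` at the file)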
+        create_example(tmp_path, "src")
+        classifiers = """\
+            Framework :: Flask
+            Programming Language :: Haskell
+        """
+        (tmp_path / "classifiers.txt").write_text(cleandoc(classifiers))
+
+        pyproject = tmp_path / "pyproject.toml"
+        config = read_configuration(pyproject, expand=False)
+        dynamic = config["project"]["dynamic"]
+        config["project"]["dynamic"] = list({*dynamic, "classifiers"})
+        dynamic_config = config["tool"]["setuptools"]["dynamic"]
+        dynamic_config["classifiers"] = {"file": "classifiers.txt"}
+
+        # When the configuration is expanded,
+        # each line of the file should be a different classifier.
+        validate(config, pyproject)
+        expanded = expand_configuration(config, tmp_path)
+
+        assert set(expanded["project"]["classifiers"]) == {
+            "Framework :: Flask",
+            "Programming Language :: Haskell",
+        }
+
+    def test_dynamic_without_config(self, tmp_path):
+        config = """
+        [project]
+        name = "myproj"
+        version = '42'
+        dynamic = ["classifiers"]
+        """
+
+        pyproject = tmp_path / "pyproject.toml"
+        pyproject.write_text(cleandoc(config))
+        with pytest.raises(OptionError, match="No configuration .* .classifiers."):
+            read_configuration(pyproject)
+
+    def test_dynamic_without_file(self, tmp_path):
+        config = """
+        [project]
+        name = "myproj"
+        version = '42'
+        dynamic = ["classifiers"]
+
+        [tool.setuptools.dynamic]
+        classifiers = {file = ["classifiers.txt"]}
+        """
+
+        pyproject = tmp_path / "pyproject.toml"
+        pyproject.write_text(cleandoc(config))
+        with pytest.warns(UserWarning, match="File .*classifiers.txt. cannot be found"):
+            expanded = read_configuration(pyproject)
+        assert "classifiers" not in expanded["project"]
+
+
+@pytest.mark.parametrize(
+    "example",
+    (
+        """
+        [project]
+        name = "myproj"
+        version = "1.2"
+
+        [my-tool.that-disrespect.pep518]
+        value = 42
+        """,
+    ),
+)
+def test_ignore_unrelated_config(tmp_path, example):
+    pyproject = tmp_path / "pyproject.toml"
+    pyproject.write_text(cleandoc(example))
+
+    # Make sure no error is raised due to 3rd party configs in pyproject.toml
+    assert read_configuration(pyproject) is not None
+
+
+@pytest.mark.parametrize(
+    "example, error_msg, value_shown_in_debug",
+    [
+        (
+            """
+            [project]
+            name = "myproj"
+            version = "1.2"
+            requires = ['pywin32; platform_system=="Windows"' ]
+            """,
+            "configuration error: `project` must not contain {'requires'} properties",
+            '"requires": ["pywin32; platform_system==\\"Windows\\""]',
+        ),
+    ],
+)
+def test_invalid_example(tmp_path, caplog, example, error_msg, value_shown_in_debug):
+    caplog.set_level(logging.DEBUG)
+    pyproject = tmp_path / "pyproject.toml"
+    pyproject.write_text(cleandoc(example))
+
+    caplog.clear()
+    with pytest.raises(ValueError, match="invalid pyproject.toml"):
+        read_configuration(pyproject)
+
+    # Make sure the logs give guidance to the user
+    error_log = caplog.record_tuples[0]
+    assert error_log[1] == logging.ERROR
+    assert error_msg in error_log[2]
+
+    debug_log = caplog.record_tuples[1]
+    assert debug_log[1] == logging.DEBUG
+    debug_msg = "".join(line.strip() for line in debug_log[2].splitlines())
+    assert value_shown_in_debug in debug_msg
+
+
+@pytest.mark.parametrize("config", ("", "[tool.something]\nvalue = 42"))
+def test_empty(tmp_path, config):
+    pyproject = tmp_path / "pyproject.toml"
+    pyproject.write_text(config)
+
+    # Make sure no error is raised
+    assert read_configuration(pyproject) == {}
+
+
+@pytest.mark.parametrize("config", ("[project]\nname = 'myproj'\nversion='42'\n",))
+def test_include_package_data_by_default(tmp_path, config):
+    """Builds with 
``pyproject.toml`` should consider ``include-package-data=True`` as + default. + """ + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(config) + + config = read_configuration(pyproject) + assert config["tool"]["setuptools"]["include-package-data"] is True + + +def test_include_package_data_in_setuppy(tmp_path): + """Builds with ``pyproject.toml`` should consider ``include_package_data`` set in + ``setup.py``. + + See https://github.com/pypa/setuptools/issues/3197#issuecomment-1079023889 + """ + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text("[project]\nname = 'myproj'\nversion='42'\n") + setuppy = tmp_path / "setup.py" + setuppy.write_text("__import__('setuptools').setup(include_package_data=False)") + + with _Path(tmp_path): + dist = distutils.core.run_setup("setup.py", {}, stop_after="config") + + assert dist.get_name() == "myproj" + assert dist.get_version() == "42" + assert dist.include_package_data is False + + +class TestSkipBadConfig: + @pytest.mark.parametrize( + "setup_attrs", + [ + {"name": "myproj"}, + {"install_requires": ["does-not-exist"]}, + ], + ) + @pytest.mark.parametrize( + "pyproject_content", + [ + "[project]\nrequires-python = '>=3.7'\n", + "[project]\nversion = '42'\nrequires-python = '>=3.7'\n", + "[project]\nname='othername'\nrequires-python = '>=3.7'\n", + ], + ) + def test_popular_config(self, tmp_path, pyproject_content, setup_attrs): + # See pypa/setuptools#3199 and pypa/cibuildwheel#1064 + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(pyproject_content) + dist = Distribution(attrs=setup_attrs) + + prev_name = dist.get_name() + prev_deps = dist.install_requires + + with pytest.warns(_InvalidFile, match=r"DO NOT include.*\[project\].* table"): + dist = apply_configuration(dist, pyproject) + + assert dist.get_name() != "othername" + assert dist.get_name() == prev_name + assert dist.python_requires is None + assert set(dist.install_requires) == set(prev_deps) diff --git a/setuptools/tests/test_config.py b/setuptools/tests/config/test_setupcfg.py index 005742e4..1f35f836 100644 --- a/setuptools/tests/test_config.py +++ b/setuptools/tests/config/test_setupcfg.py @@ -1,21 +1,20 @@ -import types -import sys - -import contextlib import configparser +import contextlib +import inspect +from pathlib import Path +from unittest.mock import Mock, patch import pytest from distutils.errors import DistutilsOptionError, DistutilsFileError -from mock import patch from setuptools.dist import Distribution, _Distribution -from setuptools.config import ConfigHandler, read_configuration -from distutils.core import Command -from .textwrap import DALS +from setuptools.config.setupcfg import ConfigHandler, read_configuration +from ..textwrap import DALS class ErrConfigHandler(ConfigHandler): """Erroneous handler. 
Fails to implement required methods.""" + section_prefix = "**err**" def make_package_dir(name, base_dir, ns=False): @@ -70,7 +69,7 @@ def get_dist(tmpdir, kwargs_initial=None, parse=True): def test_parsers_implemented(): with pytest.raises(NotImplementedError): - handler = ErrConfigHandler(None, {}) + handler = ErrConfigHandler(None, {}, False, Mock()) handler.parsers @@ -186,9 +185,12 @@ class TestMetadata: def test_file_sandboxed(self, tmpdir): - fake_env(tmpdir, '[metadata]\n' 'long_description = file: ../../README\n') + tmpdir.ensure("README") + project = tmpdir.join('depth1', 'depth2') + project.ensure(dir=True) + fake_env(project, '[metadata]\n' 'long_description = file: ../../README\n') - with get_dist(tmpdir, parse=False) as dist: + with get_dist(project, parse=False) as dist: with pytest.raises(DistutilsOptionError): dist.parse_config_files() # file: out of sandbox @@ -859,22 +861,25 @@ class TestOptions: dist.parse_config_files() def test_cmdclass(self, tmpdir): - class CustomCmd(Command): - pass - - m = types.ModuleType('custom_build', 'test package') - - m.__dict__['CustomCmd'] = CustomCmd - - sys.modules['custom_build'] = m - - fake_env( - tmpdir, - '[options]\n' 'cmdclass =\n' ' customcmd = custom_build.CustomCmd\n', + module_path = Path(tmpdir, "src/custom_build.py") # auto discovery for src + module_path.parent.mkdir(parents=True, exist_ok=True) + module_path.write_text( + "from distutils.core import Command\n" + "class CustomCmd(Command): pass\n" ) + setup_cfg = """ + [options] + cmdclass = + customcmd = custom_build.CustomCmd + """ + fake_env(tmpdir, inspect.cleandoc(setup_cfg)) + with get_dist(tmpdir) as dist: - assert dist.cmdclass == {'customcmd': CustomCmd} + cmdclass = dist.cmdclass['customcmd'] + assert cmdclass.__name__ == "CustomCmd" + assert cmdclass.__module__ == "custom_build" + assert module_path.samefile(inspect.getfile(cmdclass)) saved_dist_init = _Distribution.__init__ diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py index e912399d..25ab49fd 100644 --- a/setuptools/tests/fixtures.py +++ b/setuptools/tests/fixtures.py @@ -1,6 +1,8 @@ +import os import contextlib import sys import subprocess +from pathlib import Path import pytest import path @@ -64,6 +66,9 @@ def sample_project(tmp_path): @pytest.fixture(scope="session") def setuptools_sdist(tmp_path_factory, request): + if os.getenv("PRE_BUILT_SETUPTOOLS_SDIST"): + return Path(os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")).resolve() + with contexts.session_locked_tmp_dir( request, tmp_path_factory, "sdist_build") as tmp: dist = next(tmp.glob("*.tar.gz"), None) @@ -79,6 +84,9 @@ def setuptools_sdist(tmp_path_factory, request): @pytest.fixture(scope="session") def setuptools_wheel(tmp_path_factory, request): + if os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL"): + return Path(os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")).resolve() + with contexts.session_locked_tmp_dir( request, tmp_path_factory, "wheel_build") as tmp: dist = next(tmp.glob("*.whl"), None) diff --git a/setuptools/tests/integration/helpers.py b/setuptools/tests/integration/helpers.py index 43f43902..24c02be0 100644 --- a/setuptools/tests/integration/helpers.py +++ b/setuptools/tests/integration/helpers.py @@ -8,6 +8,7 @@ import os import subprocess import tarfile from zipfile import ZipFile +from pathlib import Path def run(cmd, env=None): @@ -59,3 +60,16 @@ class Archive: raise ValueError(msg) return str(content.read(), "utf-8") return str(self._obj.read(zip_or_tar_info), "utf-8") + + +def get_sdist_members(sdist_path): + with 
tarfile.open(sdist_path, "r:gz") as tar: + files = [Path(f) for f in tar.getnames()] + # remove root folder + relative_files = ("/".join(f.parts[1:]) for f in files) + return {f for f in relative_files if f} + + +def get_wheel_members(wheel_path): + with ZipFile(wheel_path) as zipfile: + return set(zipfile.namelist()) diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py index 86cc4235..9d11047b 100644 --- a/setuptools/tests/integration/test_pip_install_sdist.py +++ b/setuptools/tests/integration/test_pip_install_sdist.py @@ -53,7 +53,7 @@ EXAMPLES = [ ("brotli", LATEST), # not in the list but used by urllib3 # When adding packages to this list, make sure they expose a `__version__` - # attribute, or modify the tests bellow + # attribute, or modify the tests below ] @@ -112,6 +112,7 @@ ALREADY_LOADED = ("pytest", "mypy") # loaded by pytest/pytest-enabler @pytest.mark.parametrize('package, version', EXAMPLES) +@pytest.mark.uses_network def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel): venv_pip = (venv_python, "-m", "pip") sdist = retrieve_sdist(package, version, tmp_path) diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index eb43fe9b..36940e76 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -7,17 +7,27 @@ import importlib import contextlib from concurrent import futures import re +from zipfile import ZipFile import pytest from jaraco import path from .textwrap import DALS +SETUP_SCRIPT_STUB = "__import__('setuptools').setup()" + TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180")) # in seconds IS_PYPY = '__pypy__' in sys.builtin_module_names +pytestmark = pytest.mark.skipif( + sys.platform == "win32" and IS_PYPY, + reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor " + "is flaky and problematic" +) + + class BuildBackendBase: def __init__(self, cwd='.', env={}, backend_name='setuptools.build_meta'): self.cwd = cwd @@ -82,7 +92,7 @@ class BuildBackendCaller(BuildBackendBase): defns = [ - { + { # simple setup.py script 'setup.py': DALS(""" __import__('setuptools').setup( name='foo', @@ -96,7 +106,7 @@ defns = [ print('hello') """), }, - { + { # setup.py that relies on __name__ 'setup.py': DALS(""" assert __name__ == '__main__' __import__('setuptools').setup( @@ -111,7 +121,7 @@ defns = [ print('hello') """), }, - { + { # setup.py script that runs arbitrary code 'setup.py': DALS(""" variable = True def function(): @@ -129,7 +139,45 @@ defns = [ print('hello') """), }, - { + { # setup.py script that constructs temp files to be included in the distribution + 'setup.py': DALS(""" + # Some packages construct files on the fly, include them in the package, + # and immediately remove them after `setup()` (e.g. pybind11==2.9.1). + # Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)` + # to obtain a distribution object first, and then run the distutils + # commands later, because these files will be removed in the meantime. 
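+            # (the script below emulates that pattern: it creates `world.py`,
+            # ships it via `py_modules`, and unlinks it again in `finally`)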
+
+            with open('world.py', 'w') as f:
+                f.write('x = 42')
+
+            try:
+                __import__('setuptools').setup(
+                    name='foo',
+                    version='0.0.0',
+                    py_modules=['world'],
+                    setup_requires=['six'],
+                )
+            finally:
+                # Some packages will clean temporary files
+                __import__('os').unlink('world.py')
+            """),
+    },
+    { # setup.cfg only
+        'setup.cfg': DALS("""
+        [metadata]
+        name = foo
+        version = 0.0.0
+
+        [options]
+        py_modules=hello
+        setup_requires=six
+        """),
+        'hello.py': DALS("""
+        def run():
+            print('hello')
+        """)
+    },
+    { # setup.cfg and setup.py
         'setup.cfg': DALS("""
             [metadata]
             name = foo
@@ -139,6 +187,7 @@ defns = [
             py_modules=hello
             setup_requires=six
             """),
+        'setup.py': "__import__('setuptools').setup()",
         'hello.py': DALS("""
             def run():
                 print('hello')
@@ -174,7 +223,20 @@ class TestBuildMetaBackend:
         os.makedirs(dist_dir)
 
         wheel_name = build_backend.build_wheel(dist_dir)
-        assert os.path.isfile(os.path.join(dist_dir, wheel_name))
+        wheel_file = os.path.join(dist_dir, wheel_name)
+        assert os.path.isfile(wheel_file)
+
+        # Temporary files should be removed
+        assert not os.path.isfile('world.py')
+
+        with ZipFile(wheel_file) as zipfile:
+            wheel_contents = set(zipfile.namelist())
+
+        # Each one of the examples has a single module
+        # that should be included in the distribution
+        python_scripts = (f for f in wheel_contents if f.endswith('.py'))
+        modules = [f for f in python_scripts if not f.endswith('setup.py')]
+        assert len(modules) == 1
 
     @pytest.mark.parametrize('build_type', ('wheel', 'sdist'))
     def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
@@ -223,6 +285,190 @@ class TestBuildMetaBackend:
         assert third_result == second_result
         assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0
 
+    @pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB])
+    def test_build_with_pyproject_config(self, tmpdir, setup_script):
+        files = {
+            'pyproject.toml': DALS("""
+                [build-system]
+                requires = ["setuptools", "wheel"]
+                build-backend = "setuptools.build_meta"
+
+                [project]
+                name = "foo"
+                license = {text = "MIT"}
+                description = "This is a Python package"
+                dynamic = ["version", "readme"]
+                classifiers = [
+                    "Development Status :: 5 - Production/Stable",
+                    "Intended Audience :: Developers"
+                ]
+                urls = {Homepage = "http://github.com"}
+                dependencies = [
+                    "appdirs",
+                ]
+
+                [project.optional-dependencies]
+                all = [
+                    "tomli>=1",
+                    "pyscaffold>=4,<5",
+                    'importlib; python_version == "2.6"',
+                ]
+
+                [project.scripts]
+                foo = "foo.cli:main"
+
+                [tool.setuptools]
+                zip-safe = false
+                package-dir = {"" = "src"}
+                packages = {find = {where = ["src"]}}
+                license-files = ["LICENSE*"]
+
+                [tool.setuptools.dynamic]
+                version = {attr = "foo.__version__"}
+                readme = {file = "README.rst"}
+
+                [tool.distutils.sdist]
+                formats = "gztar"
+
+                [tool.distutils.bdist_wheel]
+                universal = true
+                """),
+            "MANIFEST.in": DALS("""
+                global-include *.py *.txt
+                global-exclude *.py[cod]
+                """),
+            "README.rst": "This is a ``README``",
+            "LICENSE.txt": "---- placeholder MIT license ----",
+            "src": {
+                "foo": {
+                    "__init__.py": "__version__ = '0.1'",
+                    "cli.py": "def main(): print('hello world')",
+                    "data.txt": "def main(): print('hello world')",
+                }
+            }
+        }
+        if setup_script:
+            files["setup.py"] = setup_script
+
+        build_backend = self.get_build_backend()
+        with tmpdir.as_cwd():
+            path.build(files)
+            sdist_path = build_backend.build_sdist("temp")
+            wheel_file = build_backend.build_wheel("temp")
+
+        with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
+            sdist_contents = set(tar.getnames())
+
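+        # (the backend hooks return just the archive file names; the archives
+        # themselves were written to the "temp" directory used above)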
+ with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile: + wheel_contents = set(zipfile.namelist()) + metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8") + license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8") + epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8") + + assert sdist_contents - {"foo-0.1/setup.py"} == { + 'foo-0.1', + 'foo-0.1/LICENSE.txt', + 'foo-0.1/MANIFEST.in', + 'foo-0.1/PKG-INFO', + 'foo-0.1/README.rst', + 'foo-0.1/pyproject.toml', + 'foo-0.1/setup.cfg', + 'foo-0.1/src', + 'foo-0.1/src/foo', + 'foo-0.1/src/foo/__init__.py', + 'foo-0.1/src/foo/cli.py', + 'foo-0.1/src/foo/data.txt', + 'foo-0.1/src/foo.egg-info', + 'foo-0.1/src/foo.egg-info/PKG-INFO', + 'foo-0.1/src/foo.egg-info/SOURCES.txt', + 'foo-0.1/src/foo.egg-info/dependency_links.txt', + 'foo-0.1/src/foo.egg-info/entry_points.txt', + 'foo-0.1/src/foo.egg-info/requires.txt', + 'foo-0.1/src/foo.egg-info/top_level.txt', + 'foo-0.1/src/foo.egg-info/not-zip-safe', + } + assert wheel_contents == { + "foo/__init__.py", + "foo/cli.py", + "foo/data.txt", # include_package_data defaults to True + "foo-0.1.dist-info/LICENSE.txt", + "foo-0.1.dist-info/METADATA", + "foo-0.1.dist-info/WHEEL", + "foo-0.1.dist-info/entry_points.txt", + "foo-0.1.dist-info/top_level.txt", + "foo-0.1.dist-info/RECORD", + } + assert license == "---- placeholder MIT license ----" + for line in ( + "Summary: This is a Python package", + "License: MIT", + "Classifier: Intended Audience :: Developers", + "Requires-Dist: appdirs", + "Requires-Dist: tomli (>=1) ; extra == 'all'", + "Requires-Dist: importlib ; (python_version == \"2.6\") and extra == 'all'" + ): + assert line in metadata + + assert metadata.strip().endswith("This is a ``README``") + assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main" + + def test_static_metadata_in_pyproject_config(self, tmpdir): + # Make sure static metadata in pyproject.toml is not overwritten by setup.py + # as required by PEP 621 + files = { + 'pyproject.toml': DALS(""" + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + + [project] + name = "foo" + description = "This is a Python package" + version = "42" + dependencies = ["six"] + """), + 'hello.py': DALS(""" + def run(): + print('hello') + """), + 'setup.py': DALS(""" + __import__('setuptools').setup( + name='bar', + version='13', + ) + """), + } + build_backend = self.get_build_backend() + with tmpdir.as_cwd(): + path.build(files) + sdist_path = build_backend.build_sdist("temp") + wheel_file = build_backend.build_wheel("temp") + + assert (tmpdir / "temp/foo-42.tar.gz").exists() + assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists() + assert not (tmpdir / "temp/bar-13.tar.gz").exists() + assert not (tmpdir / "temp/bar-42.tar.gz").exists() + assert not (tmpdir / "temp/foo-13.tar.gz").exists() + assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists() + assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists() + assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists() + + with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar: + pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8") + members = tar.getnames() + assert "bar-13/PKG-INFO" not in members + + with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile: + metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8") + members = zipfile.namelist() + assert "bar-13.dist-info/METADATA" not in members + + for file in pkg_info, metadata: 
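+            # (both archives must carry the pyproject.toml metadata and none of
+            # the values passed to setup.py)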
+ for line in ("Name: foo", "Version: 42"): + assert line in file + for line in ("Name: bar", "Version: 13"): + assert line not in file + def test_build_sdist(self, build_backend): dist_dir = os.path.abspath('pip-sdist') os.makedirs(dist_dir) @@ -416,6 +662,30 @@ class TestBuildMetaBackend: assert expected == sorted(actual) + def test_setup_requires_with_auto_discovery(self, tmpdir_cwd): + # Make sure patches introduced to retrieve setup_requires don't accidentally + # activate auto-discovery and cause problems due to the incomplete set of + # attributes passed to MinimalDistribution + files = { + 'pyproject.toml': DALS(""" + [project] + name = "proj" + version = "42" + """), + "setup.py": DALS(""" + __import__('setuptools').setup( + setup_requires=["foo"], + py_modules = ["hello", "world"] + ) + """), + 'hello.py': "'hello'", + 'world.py': "'world'", + } + path.build(files) + build_backend = self.get_build_backend() + setup_requires = build_backend.get_requires_for_build_wheel() + assert setup_requires == ["wheel", "foo"] + def test_dont_install_setup_requires(self, tmpdir_cwd): files = { 'setup.py': DALS(""" diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py new file mode 100644 index 00000000..fac365f4 --- /dev/null +++ b/setuptools/tests/test_config_discovery.py @@ -0,0 +1,581 @@ +import os +import sys +from configparser import ConfigParser +from itertools import product + +from setuptools.command.sdist import sdist +from setuptools.dist import Distribution +from setuptools.discovery import find_package_path, find_parent_package +from setuptools.errors import PackageDiscoveryError + +import setuptools # noqa -- force distutils.core to be patched +import distutils.core + +import pytest +import jaraco.path +from path import Path as _Path + +from .contexts import quiet +from .integration.helpers import get_sdist_members, get_wheel_members, run +from .textwrap import DALS + + +class TestFindParentPackage: + def test_single_package(self, tmp_path): + # find_parent_package should find a non-namespace parent package + (tmp_path / "src/namespace/pkg/nested").mkdir(exist_ok=True, parents=True) + (tmp_path / "src/namespace/pkg/nested/__init__.py").touch() + (tmp_path / "src/namespace/pkg/__init__.py").touch() + packages = ["namespace", "namespace.pkg", "namespace.pkg.nested"] + assert find_parent_package(packages, {"": "src"}, tmp_path) == "namespace.pkg" + + def test_multiple_toplevel(self, tmp_path): + # find_parent_package should return null if the given list of packages does not + # have a single parent package + multiple = ["pkg", "pkg1", "pkg2"] + for name in multiple: + (tmp_path / f"src/{name}").mkdir(exist_ok=True, parents=True) + (tmp_path / f"src/{name}/__init__.py").touch() + assert find_parent_package(multiple, {"": "src"}, tmp_path) is None + + +class TestDiscoverPackagesAndPyModules: + """Make sure discovered values for ``packages`` and ``py_modules`` work + similarly to explicit configuration for the simple scenarios. 
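+
+    The ``automatic-*`` circumstances exercise auto-discovery, while the
+    ``explicit-*`` ones pin down the behaviour being imitated.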
+    """
+    OPTIONS = {
+        # Different options according to the circumstance being tested
+        "explicit-src": {
+            "package_dir": {"": "src"},
+            "packages": ["pkg"]
+        },
+        "variation-lib": {
+            "package_dir": {"": "lib"},  # variation of the source-layout
+        },
+        "explicit-flat": {
+            "packages": ["pkg"]
+        },
+        "explicit-single_module": {
+            "py_modules": ["pkg"]
+        },
+        "explicit-namespace": {
+            "packages": ["ns", "ns.pkg"]
+        },
+        "automatic-src": {},
+        "automatic-flat": {},
+        "automatic-single_module": {},
+        "automatic-namespace": {}
+    }
+    FILES = {
+        "src": ["src/pkg/__init__.py", "src/pkg/main.py"],
+        "lib": ["lib/pkg/__init__.py", "lib/pkg/main.py"],
+        "flat": ["pkg/__init__.py", "pkg/main.py"],
+        "single_module": ["pkg.py"],
+        "namespace": ["ns/pkg/__init__.py"]
+    }
+
+    def _get_info(self, circumstance):
+        _, _, layout = circumstance.partition("-")
+        files = self.FILES[layout]
+        options = self.OPTIONS[circumstance]
+        return files, options
+
+    @pytest.mark.parametrize("circumstance", OPTIONS.keys())
+    def test_sdist_filelist(self, tmp_path, circumstance):
+        files, options = self._get_info(circumstance)
+        _populate_project_dir(tmp_path, files, options)
+
+        _, cmd = _run_sdist_programatically(tmp_path, options)
+
+        manifest = [f.replace(os.sep, "/") for f in cmd.filelist.files]
+        for file in files:
+            assert any(f.endswith(file) for f in manifest)
+
+    @pytest.mark.parametrize("circumstance", OPTIONS.keys())
+    def test_project(self, tmp_path, circumstance):
+        files, options = self._get_info(circumstance)
+        _populate_project_dir(tmp_path, files, options)
+
+        # Simulate a pre-existing `build` directory
+        (tmp_path / "build").mkdir()
+        (tmp_path / "build/lib").mkdir()
+        (tmp_path / "build/bdist.linux-x86_64").mkdir()
+        (tmp_path / "build/bdist.linux-x86_64/file.py").touch()
+        (tmp_path / "build/lib/__init__.py").touch()
+        (tmp_path / "build/lib/file.py").touch()
+        (tmp_path / "dist").mkdir()
+        (tmp_path / "dist/file.py").touch()
+
+        _run_build(tmp_path)
+
+        sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz")))
+        print("~~~~~ sdist_members ~~~~~")
+        print('\n'.join(sdist_files))
+        assert sdist_files >= set(files)
+
+        wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl")))
+        print("~~~~~ wheel_members ~~~~~")
+        print('\n'.join(wheel_files))
+        orig_files = {f.replace("src/", "").replace("lib/", "") for f in files}
+        assert wheel_files >= orig_files
+
+        # Make sure build files are not included by mistake
+        for file in wheel_files:
+            assert "build" not in file
+            assert "dist" not in file
+
+    PURPOSEFULLY_EMPY = {
+        "setup.cfg": DALS(
+            """
+            [metadata]
+            name = myproj
+            version = 0.0.0
+
+            [options]
+            {param} =
+            """
+        ),
+        "setup.py": DALS(
+            """
+            __import__('setuptools').setup(
+                name="myproj",
+                version="0.0.0",
+                {param}=[]
+            )
+            """
+        ),
+        "pyproject.toml": DALS(
+            """
+            [build-system]
+            requires = []
+            build-backend = 'setuptools.build_meta'
+
+            [project]
+            name = "myproj"
+            version = "0.0.0"
+
+            [tool.setuptools]
+            {param} = []
+            """
+        ),
+        "template-pyproject.toml": DALS(
+            """
+            [build-system]
+            requires = []
+            build-backend = 'setuptools.build_meta'
+            """
+        )
+    }
+
+    @pytest.mark.parametrize(
+        "config_file, param, circumstance",
+        product(
+            ["setup.cfg", "setup.py", "pyproject.toml"],
+            ["packages", "py_modules"],
+            FILES.keys()
+        )
+    )
+    def test_purposefully_empty(self, tmp_path, config_file, param, circumstance):
+        files = self.FILES[circumstance] + ["mod.py", "other.py", "src/pkg/__init__.py"]
+        _populate_project_dir(tmp_path, files, {})
+
+        if config_file == 
"pyproject.toml": + template_param = param.replace("_", "-") + else: + # Make sure build works with or without setup.cfg + pyproject = self.PURPOSEFULLY_EMPY["template-pyproject.toml"] + (tmp_path / "pyproject.toml").write_text(pyproject) + template_param = param + + config = self.PURPOSEFULLY_EMPY[config_file].format(param=template_param) + (tmp_path / config_file).write_text(config) + + dist = _get_dist(tmp_path, {}) + # When either parameter package or py_modules is an empty list, + # then there should be no discovery + assert getattr(dist, param) == [] + other = {"py_modules": "packages", "packages": "py_modules"}[param] + assert getattr(dist, other) is None + + @pytest.mark.parametrize( + "extra_files, pkgs", + [ + (["venv/bin/simulate_venv"], {"pkg"}), + (["pkg-stubs/__init__.pyi"], {"pkg", "pkg-stubs"}), + (["other-stubs/__init__.pyi"], {"pkg", "other-stubs"}), + ( + # Type stubs can also be namespaced + ["namespace-stubs/pkg/__init__.pyi"], + {"pkg", "namespace-stubs", "namespace-stubs.pkg"}, + ), + ( + # Just the top-level package can have `-stubs`, ignore nested ones + ["namespace-stubs/pkg-stubs/__init__.pyi"], + {"pkg", "namespace-stubs"} + ), + (["_hidden/file.py"], {"pkg"}), + (["news/finalize.py"], {"pkg"}), + ] + ) + def test_flat_layout_with_extra_files(self, tmp_path, extra_files, pkgs): + files = self.FILES["flat"] + extra_files + _populate_project_dir(tmp_path, files, {}) + dist = _get_dist(tmp_path, {}) + assert set(dist.packages) == pkgs + + @pytest.mark.parametrize( + "extra_files", + [ + ["other/__init__.py"], + ["other/finalize.py"], + ] + ) + def test_flat_layout_with_dangerous_extra_files(self, tmp_path, extra_files): + files = self.FILES["flat"] + extra_files + _populate_project_dir(tmp_path, files, {}) + with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"): + _get_dist(tmp_path, {}) + + def test_flat_layout_with_single_module(self, tmp_path): + files = self.FILES["single_module"] + ["invalid-module-name.py"] + _populate_project_dir(tmp_path, files, {}) + dist = _get_dist(tmp_path, {}) + assert set(dist.py_modules) == {"pkg"} + + def test_flat_layout_with_multiple_modules(self, tmp_path): + files = self.FILES["single_module"] + ["valid_module_name.py"] + _populate_project_dir(tmp_path, files, {}) + with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"): + _get_dist(tmp_path, {}) + + +class TestNoConfig: + DEFAULT_VERSION = "0.0.0" # Default version given by setuptools + + EXAMPLES = { + "pkg1": ["src/pkg1.py"], + "pkg2": ["src/pkg2/__init__.py"], + "pkg3": ["src/pkg3/__init__.py", "src/pkg3-stubs/__init__.py"], + "pkg4": ["pkg4/__init__.py", "pkg4-stubs/__init__.py"], + "ns.nested.pkg1": ["src/ns/nested/pkg1/__init__.py"], + "ns.nested.pkg2": ["ns/nested/pkg2/__init__.py"], + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_discover_name(self, tmp_path, example): + _populate_project_dir(tmp_path, self.EXAMPLES[example], {}) + dist = _get_dist(tmp_path, {}) + assert dist.get_name() == example + + def test_build_with_discovered_name(self, tmp_path): + files = ["src/ns/nested/pkg/__init__.py"] + _populate_project_dir(tmp_path, files, {}) + _run_build(tmp_path, "--sdist") + # Expected distribution file + dist_file = tmp_path / f"dist/ns.nested.pkg-{self.DEFAULT_VERSION}.tar.gz" + assert dist_file.is_file() + + +class TestWithAttrDirective: + @pytest.mark.parametrize( + "folder, opts", + [ + ("src", {}), + ("lib", {"packages": "find:", "packages.find": {"where": "lib"}}), + ] + ) + def 
test_setupcfg_metadata(self, tmp_path, folder, opts): + files = [f"{folder}/pkg/__init__.py", "setup.cfg"] + _populate_project_dir(tmp_path, files, opts) + (tmp_path / folder / "pkg/__init__.py").write_text("version = 42") + (tmp_path / "setup.cfg").write_text( + "[metadata]\nversion = attr: pkg.version\n" + + (tmp_path / "setup.cfg").read_text() + ) + + dist = _get_dist(tmp_path, {}) + assert dist.get_name() == "pkg" + assert dist.get_version() == "42" + assert dist.package_dir + package_path = find_package_path("pkg", dist.package_dir, tmp_path) + assert os.path.exists(package_path) + assert folder in _Path(package_path).parts() + + _run_build(tmp_path, "--sdist") + dist_file = tmp_path / "dist/pkg-42.tar.gz" + assert dist_file.is_file() + + def test_pyproject_metadata(self, tmp_path): + _populate_project_dir(tmp_path, ["src/pkg/__init__.py"], {}) + (tmp_path / "src/pkg/__init__.py").write_text("version = 42") + (tmp_path / "pyproject.toml").write_text( + "[project]\nname = 'pkg'\ndynamic = ['version']\n" + "[tool.setuptools.dynamic]\nversion = {attr = 'pkg.version'}\n" + ) + dist = _get_dist(tmp_path, {}) + assert dist.get_version() == "42" + assert dist.package_dir == {"": "src"} + + +class TestWithCExtension: + def _simulate_package_with_extension(self, tmp_path): + # This example is based on: https://github.com/nucleic/kiwi/tree/1.4.0 + files = [ + "benchmarks/file.py", + "docs/Makefile", + "docs/requirements.txt", + "docs/source/conf.py", + "proj/header.h", + "proj/file.py", + "py/proj.cpp", + "py/other.cpp", + "py/file.py", + "py/py.typed", + "py/tests/test_proj.py", + "README.rst", + ] + _populate_project_dir(tmp_path, files, {}) + + setup_script = """ + from setuptools import Extension, setup + + ext_modules = [ + Extension( + "proj", + ["py/proj.cpp", "py/other.cpp"], + include_dirs=["."], + language="c++", + ), + ] + setup(ext_modules=ext_modules) + """ + (tmp_path / "setup.py").write_text(DALS(setup_script)) + + def test_skip_discovery_with_setupcfg_metadata(self, tmp_path): + """Ensure that auto-discovery is not triggered when the project is based on + C-extensions only, for backward compatibility. + """ + self._simulate_package_with_extension(tmp_path) + + pyproject = """ + [build-system] + requires = [] + build-backend = 'setuptools.build_meta' + """ + (tmp_path / "pyproject.toml").write_text(DALS(pyproject)) + + setupcfg = """ + [metadata] + name = proj + version = 42 + """ + (tmp_path / "setup.cfg").write_text(DALS(setupcfg)) + + dist = _get_dist(tmp_path, {}) + assert dist.get_name() == "proj" + assert dist.get_version() == "42" + assert dist.py_modules is None + assert dist.packages is None + assert len(dist.ext_modules) == 1 + assert dist.ext_modules[0].name == "proj" + + def test_dont_skip_discovery_with_pyproject_metadata(self, tmp_path): + """When opting-in to pyproject.toml metadata, auto-discovery will be active if + the package lists C-extensions, but does not configure py-modules or packages. + + This way we ensure users with complex package layouts that would lead to the + discovery of multiple top-level modules/packages see errors and are forced to + explicitly set ``packages`` or ``py-modules``. 
+ """ + self._simulate_package_with_extension(tmp_path) + + pyproject = """ + [project] + name = 'proj' + version = '42' + """ + (tmp_path / "pyproject.toml").write_text(DALS(pyproject)) + with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"): + _get_dist(tmp_path, {}) + + +class TestWithPackageData: + def _simulate_package_with_data_files(self, tmp_path, src_root): + files = [ + f"{src_root}/proj/__init__.py", + f"{src_root}/proj/file1.txt", + f"{src_root}/proj/nested/file2.txt", + ] + _populate_project_dir(tmp_path, files, {}) + + manifest = """ + global-include *.py *.txt + """ + (tmp_path / "MANIFEST.in").write_text(DALS(manifest)) + + EXAMPLE_SETUPCFG = """ + [metadata] + name = proj + version = 42 + + [options] + include_package_data = True + """ + EXAMPLE_PYPROJECT = """ + [project] + name = "proj" + version = "42" + """ + + PYPROJECT_PACKAGE_DIR = """ + [tool.setuptools] + package-dir = {"" = "src"} + """ + + @pytest.mark.parametrize( + "src_root, files", + [ + (".", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}), + (".", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}), + ("src", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}), + ("src", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}), + ( + "src", + { + "setup.cfg": DALS(EXAMPLE_SETUPCFG) + DALS( + """ + packages = find: + package_dir = + =src + + [options.packages.find] + where = src + """ + ) + } + ), + ( + "src", + { + "pyproject.toml": DALS(EXAMPLE_PYPROJECT) + DALS( + """ + [tool.setuptools] + package-dir = {"" = "src"} + """ + ) + }, + ), + ] + ) + def test_include_package_data(self, tmp_path, src_root, files): + """ + Make sure auto-discovery does not affect package include_package_data. + See issue #3196. + """ + jaraco.path.build(files, prefix=str(tmp_path)) + self._simulate_package_with_data_files(tmp_path, src_root) + + expected = { + os.path.normpath(f"{src_root}/proj/file1.txt").replace(os.sep, "/"), + os.path.normpath(f"{src_root}/proj/nested/file2.txt").replace(os.sep, "/"), + } + + _run_build(tmp_path) + + sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz"))) + print("~~~~~ sdist_members ~~~~~") + print('\n'.join(sdist_files)) + assert sdist_files >= expected + + wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl"))) + print("~~~~~ wheel_members ~~~~~") + print('\n'.join(wheel_files)) + orig_files = {f.replace("src/", "").replace("lib/", "") for f in expected} + assert wheel_files >= orig_files + + +def test_compatible_with_numpy_configuration(tmp_path): + files = [ + "dir1/__init__.py", + "dir2/__init__.py", + "file.py", + ] + _populate_project_dir(tmp_path, files, {}) + dist = Distribution({}) + dist.configuration = object() + dist.set_defaults() + assert dist.py_modules is None + assert dist.packages is None + + +def _populate_project_dir(root, files, options): + # NOTE: Currently pypa/build will refuse to build the project if no + # `pyproject.toml` or `setup.py` is found. So it is impossible to do + # completely "config-less" projects. 
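+    # (hence the stub `setup.py` written below, even for the "automatic" layouts)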
+ (root / "setup.py").write_text("import setuptools\nsetuptools.setup()") + (root / "README.md").write_text("# Example Package") + (root / "LICENSE").write_text("Copyright (c) 2018") + _write_setupcfg(root, options) + paths = (root / f for f in files) + for path in paths: + path.parent.mkdir(exist_ok=True, parents=True) + path.touch() + + +def _write_setupcfg(root, options): + if not options: + print("~~~~~ **NO** setup.cfg ~~~~~") + return + setupcfg = ConfigParser() + setupcfg.add_section("options") + for key, value in options.items(): + if key == "packages.find": + setupcfg.add_section(f"options.{key}") + setupcfg[f"options.{key}"].update(value) + elif isinstance(value, list): + setupcfg["options"][key] = ", ".join(value) + elif isinstance(value, dict): + str_value = "\n".join(f"\t{k} = {v}" for k, v in value.items()) + setupcfg["options"][key] = "\n" + str_value + else: + setupcfg["options"][key] = str(value) + with open(root / "setup.cfg", "w") as f: + setupcfg.write(f) + print("~~~~~ setup.cfg ~~~~~") + print((root / "setup.cfg").read_text()) + + +def _run_build(path, *flags): + cmd = [sys.executable, "-m", "build", "--no-isolation", *flags, str(path)] + return run(cmd, env={'DISTUTILS_DEBUG': ''}) + + +def _get_dist(dist_path, attrs): + root = "/".join(os.path.split(dist_path)) # POSIX-style + + script = dist_path / 'setup.py' + if script.exists(): + with _Path(dist_path): + dist = distutils.core.run_setup("setup.py", {}, stop_after="init") + else: + dist = Distribution(attrs) + + dist.src_root = root + dist.script_name = "setup.py" + with _Path(dist_path): + dist.parse_config_files() + + dist.set_defaults() + return dist + + +def _run_sdist_programatically(dist_path, attrs): + dist = _get_dist(dist_path, attrs) + cmd = sdist(dist) + cmd.ensure_finalized() + assert cmd.distribution.packages or cmd.distribution.py_modules + + with quiet(), _Path(dist_path): + cmd.run() + + return dist, cmd diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py index 4980f2c3..e7d2f5ca 100644 --- a/setuptools/tests/test_dist.py +++ b/setuptools/tests/test_dist.py @@ -2,6 +2,7 @@ import io import collections import re import functools +import os import urllib.request import urllib.parse from distutils.errors import DistutilsSetupError @@ -18,6 +19,7 @@ from setuptools import Distribution from .textwrap import DALS from .test_easy_install import make_nspkg_sdist +from .test_find_packages import ensure_files import pytest @@ -69,16 +71,19 @@ def test_dist__get_unpatched_deprecated(): pytest.warns(DistDeprecationWarning, _get_unpatched, [""]) +EXAMPLE_BASE_INFO = dict( + name="package", + version="0.0.1", + author="Foo Bar", + author_email="foo@bar.net", + long_description="Long\ndescription", + description="Short description", + keywords=["one", "two"], +) + + def __read_test_cases(): - base = dict( - name="package", - version="0.0.1", - author="Foo Bar", - author_email="foo@bar.net", - long_description="Long\ndescription", - description="Short description", - keywords=["one", "two"], - ) + base = EXAMPLE_BASE_INFO params = functools.partial(dict, base) @@ -379,3 +384,126 @@ def test_rfc822_unescape(content, result): def test_metadata_name(): with pytest.raises(DistutilsSetupError, match='missing.*name'): Distribution()._validate_metadata() + + +@pytest.mark.parametrize( + "dist_name, py_module", + [ + ("my.pkg", "my_pkg"), + ("my-pkg", "my_pkg"), + ("my_pkg", "my_pkg"), + ("pkg", "pkg"), + ] +) +def test_dist_default_py_modules(tmp_path, dist_name, py_module): + (tmp_path / 
f"{py_module}.py").touch()
+
+    (tmp_path / "setup.py").touch()
+    (tmp_path / "noxfile.py").touch()
+    # ^-- make sure common tool files are ignored
+
+    attrs = {
+        **EXAMPLE_BASE_INFO,
+        "name": dist_name,
+        "src_root": str(tmp_path)
+    }
+    # Find `py_modules` corresponding to dist_name if not given
+    dist = Distribution(attrs)
+    dist.set_defaults()
+    assert dist.py_modules == [py_module]
+    # When `py_modules` is given, don't do anything
+    dist = Distribution({**attrs, "py_modules": ["explicit_py_module"]})
+    dist.set_defaults()
+    assert dist.py_modules == ["explicit_py_module"]
+    # When `packages` is given, don't do anything
+    dist = Distribution({**attrs, "packages": ["explicit_package"]})
+    dist.set_defaults()
+    assert not dist.py_modules
+
+
+@pytest.mark.parametrize(
+    "dist_name, package_dir, package_files, packages",
+    [
+        ("my.pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
+        ("my-pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
+        ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]),
+        ("my.pkg", None, ["my/pkg/__init__.py"], ["my", "my.pkg"]),
+        (
+            "my_pkg",
+            None,
+            ["src/my_pkg/__init__.py", "src/my_pkg2/__init__.py"],
+            ["my_pkg", "my_pkg2"]
+        ),
+        (
+            "my_pkg",
+            {"pkg": "lib", "pkg2": "lib2"},
+            ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
+            ["pkg", "pkg.nested", "pkg2"]
+        ),
+    ]
+)
+def test_dist_default_packages(
+    tmp_path, dist_name, package_dir, package_files, packages
+):
+    ensure_files(tmp_path, package_files)
+
+    (tmp_path / "setup.py").touch()
+    (tmp_path / "noxfile.py").touch()
+    # ^-- should not be included by default
+
+    attrs = {
+        **EXAMPLE_BASE_INFO,
+        "name": dist_name,
+        "src_root": str(tmp_path),
+        "package_dir": package_dir
+    }
+    # Find `packages` either corresponding to dist_name or inside src
+    dist = Distribution(attrs)
+    dist.set_defaults()
+    assert not dist.py_modules
+    assert set(dist.packages) == set(packages)
+    # When `py_modules` is given, don't do anything
+    dist = Distribution({**attrs, "py_modules": ["explicit_py_module"]})
+    dist.set_defaults()
+    assert not dist.packages
+    assert set(dist.py_modules) == {"explicit_py_module"}
+    # When `packages` is given, don't do anything
+    dist = Distribution({**attrs, "packages": ["explicit_package"]})
+    dist.set_defaults()
+    assert not dist.py_modules
+    assert set(dist.packages) == {"explicit_package"}
+
+
+@pytest.mark.parametrize(
+    "dist_name, package_dir, package_files",
+    [
+        ("my.pkg.nested", None, ["my/pkg/nested/__init__.py"]),
+        ("my.pkg", None, ["my/pkg/__init__.py", "my/pkg/file.py"]),
+        ("my_pkg", None, ["my_pkg.py"]),
+        ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/nested/__init__.py"]),
+        ("my_pkg", None, ["src/my_pkg/__init__.py", "src/my_pkg/nested/__init__.py"]),
+        (
+            "my_pkg",
+            {"my_pkg": "lib", "my_pkg.lib2": "lib2"},
+            ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"],
+        ),
+        # Should not try to guess a name from multiple py_modules/packages
+        ("UNKNOWN", None, ["src/mod1.py", "src/mod2.py"]),
+        ("UNKNOWN", None, ["src/pkg1/__ini__.py", "src/pkg2/__init__.py"]),
+    ]
+)
+def test_dist_default_name(tmp_path, dist_name, package_dir, package_files):
+    """Make sure dist.name is discovered from packages/py_modules"""
+    ensure_files(tmp_path, package_files)
+    attrs = {
+        **EXAMPLE_BASE_INFO,
+        "src_root": "/".join(os.path.split(tmp_path)),  # POSIX-style
+        "package_dir": package_dir
+    }
+    del attrs["name"]
+
+    dist = Distribution(attrs)
+    dist.set_defaults()
+    assert 
dist.py_modules or dist.packages + assert dist.get_name() == dist_name diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index 878eb7c3..85f528db 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -449,6 +449,63 @@ class TestDistutilsPackage: run_setup('setup.py', ['bdist_egg']) +class TestInstallRequires: + def test_setup_install_includes_dependencies(self, tmp_path, mock_index): + """ + When ``python setup.py install`` is called directly, it will use easy_install + to fetch dependencies. + """ + # TODO: Remove these tests once `setup.py install` is completely removed + project_root = tmp_path / "project" + project_root.mkdir(exist_ok=True) + install_root = tmp_path / "install" + install_root.mkdir(exist_ok=True) + + self.create_project(project_root) + cmd = [ + sys.executable, + '-c', '__import__("setuptools").setup()', + 'install', + '--install-base', str(install_root), + '--install-lib', str(install_root), + '--install-headers', str(install_root), + '--install-scripts', str(install_root), + '--install-data', str(install_root), + '--install-purelib', str(install_root), + '--install-platlib', str(install_root), + ] + env = {"PYTHONPATH": str(install_root), "__EASYINSTALL_INDEX": mock_index.url} + with pytest.raises(subprocess.CalledProcessError) as exc_info: + subprocess.check_output( + cmd, cwd=str(project_root), env=env, stderr=subprocess.STDOUT, text=True + ) + try: + assert '/does-not-exist/' in {r.path for r in mock_index.requests} + assert next( + line + for line in exc_info.value.output.splitlines() + if "not find suitable distribution for" in line + and "does-not-exist" in line + ) + except Exception: + if "failed to get random numbers" in exc_info.value.output: + pytest.xfail(f"{sys.platform} failure - {exc_info.value.output}") + raise + + def create_project(self, root): + config = """ + [metadata] + name = project + version = 42 + + [options] + install_requires = does-not-exist + py_modules = mod + """ + (root / 'setup.cfg').write_text(DALS(config), encoding="utf-8") + (root / 'mod.py').touch() + + class TestSetupRequires: def test_setup_requires_honors_fetch_params(self, mock_index, monkeypatch): @@ -467,7 +524,7 @@ class TestSetupRequires: with contexts.environment(PYTHONPATH=temp_install_dir): cmd = [ sys.executable, - '-m', 'setup', + '-c', '__import__("setuptools").setup()', 'easy_install', '--index-url', mock_index.url, '--exclude-scripts', diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py new file mode 100644 index 00000000..aac4f5ee --- /dev/null +++ b/setuptools/tests/test_editable_install.py @@ -0,0 +1,113 @@ +import subprocess +from textwrap import dedent + +import pytest +import jaraco.envs +import path + + +@pytest.fixture +def venv(tmp_path, setuptools_wheel): + env = jaraco.envs.VirtualEnv() + vars(env).update( + root=path.Path(tmp_path), # workaround for error on windows + name=".venv", + create_opts=["--no-setuptools"], + req=str(setuptools_wheel), + ) + return env.create() + + +EXAMPLE = { + 'pyproject.toml': dedent("""\ + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + + [project] + name = "mypkg" + version = "3.14159" + license = {text = "MIT"} + description = "This is a Python package" + dynamic = ["readme"] + classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers" + ] + urls = {Homepage = "http://github.com"} + dependencies = 
['importlib-metadata; python_version<"3.8"'] + + [tool.setuptools] + package-dir = {"" = "src"} + packages = {find = {where = ["src"]}} + license-files = ["LICENSE*"] + + [tool.setuptools.dynamic] + readme = {file = "README.rst"} + + [tool.distutils.egg_info] + tag-build = ".post0" + """), + "MANIFEST.in": dedent("""\ + global-include *.py *.txt + global-exclude *.py[cod] + """).strip(), + "README.rst": "This is a ``README``", + "LICENSE.txt": "---- placeholder MIT license ----", + "src": { + "mypkg": { + "__init__.py": dedent("""\ + import sys + + if sys.version_info[:2] >= (3, 8): + from importlib.metadata import PackageNotFoundError, version + else: + from importlib_metadata import PackageNotFoundError, version + + try: + __version__ = version(__name__) + except PackageNotFoundError: + __version__ = "unknown" + """), + "__main__.py": dedent("""\ + from importlib.resources import read_text + from . import __version__, __name__ as parent + from .mod import x + + data = read_text(parent, "data.txt") + print(__version__, data, x) + """), + "mod.py": "x = ''", + "data.txt": "Hello World", + } + } +} + + +SETUP_SCRIPT_STUB = "__import__('setuptools').setup()" +MISSING_SETUP_SCRIPT = pytest.param( + None, + marks=pytest.mark.xfail( + reason="Editable install is currently only supported with `setup.py`" + ) +) + + +@pytest.mark.parametrize("setup_script", [SETUP_SCRIPT_STUB, MISSING_SETUP_SCRIPT]) +def test_editable_with_pyproject(tmp_path, venv, setup_script): + project = tmp_path / "mypkg" + files = {**EXAMPLE, "setup.py": setup_script} + project.mkdir() + jaraco.path.build(files, prefix=project) + + cmd = [venv.exe(), "-m", "pip", "install", + "--no-build-isolation", # required to force current version of setuptools + "-e", str(project)] + print(str(subprocess.check_output(cmd), "utf-8")) + + cmd = [venv.exe(), "-m", "mypkg"] + assert subprocess.check_output(cmd).strip() == b"3.14159.post0 Hello World" + + (project / "src/mypkg/data.txt").write_text("foobar") + (project / "src/mypkg/mod.py").write_text("x = 42") + assert subprocess.check_output(cmd).strip() == b"3.14159.post0 foobar 42" diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py index 906713f6..efcce924 100644 --- a/setuptools/tests/test_find_packages.py +++ b/setuptools/tests/test_find_packages.py @@ -1,4 +1,4 @@ -"""Tests for setuptools.find_packages().""" +"""Tests for automatic package discovery""" import os import sys import shutil @@ -9,6 +9,7 @@ import pytest from setuptools import find_packages from setuptools import find_namespace_packages +from setuptools.discovery import FlatLayoutPackageFinder # modeled after CPython's test.support.can_symlink @@ -178,3 +179,67 @@ class TestFindPackages: shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) packages = find_namespace_packages(self.dist_dir) self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) + + +class TestFlatLayoutPackageFinder: + EXAMPLES = { + "hidden-folders": ( + [".pkg/__init__.py", "pkg/__init__.py", "pkg/nested/file.txt"], + ["pkg", "pkg.nested"] + ), + "private-packages": ( + ["_pkg/__init__.py", "pkg/_private/__init__.py"], + ["pkg", "pkg._private"] + ), + "invalid-name": ( + ["invalid-pkg/__init__.py", "other.pkg/__init__.py", "yet,another/file.py"], + [] + ), + "docs": ( + ["pkg/__init__.py", "docs/conf.py", "docs/readme.rst"], + ["pkg"] + ), + "tests": ( + ["pkg/__init__.py", "tests/test_pkg.py", "tests/__init__.py"], + ["pkg"] + ), + "examples": ( + [ + "pkg/__init__.py", + 
"examples/__init__.py", + "examples/file.py" + "example/other_file.py", + # Sub-packages should always be fine + "pkg/example/__init__.py", + "pkg/examples/__init__.py", + ], + ["pkg", "pkg.examples", "pkg.example"] + ), + "tool-specific": ( + [ + "pkg/__init__.py", + "tasks/__init__.py", + "tasks/subpackage/__init__.py", + "fabfile/__init__.py", + "fabfile/subpackage/__init__.py", + # Sub-packages should always be fine + "pkg/tasks/__init__.py", + "pkg/fabfile/__init__.py", + ], + ["pkg", "pkg.tasks", "pkg.fabfile"] + ) + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_unwanted_directories_not_included(self, tmp_path, example): + files, expected_packages = self.EXAMPLES[example] + ensure_files(tmp_path, files) + found_packages = FlatLayoutPackageFinder.find(str(tmp_path)) + assert set(found_packages) == set(expected_packages) + + +def ensure_files(root_path, files): + for file in files: + path = root_path / file + path.parent.mkdir(parents=True, exist_ok=True) + path.touch() diff --git a/setuptools/tests/test_find_py_modules.py b/setuptools/tests/test_find_py_modules.py new file mode 100644 index 00000000..4ef68801 --- /dev/null +++ b/setuptools/tests/test_find_py_modules.py @@ -0,0 +1,81 @@ +"""Tests for automatic discovery of modules""" +import os + +import pytest + +from setuptools.discovery import FlatLayoutModuleFinder, ModuleFinder + +from .test_find_packages import ensure_files, has_symlink + + +class TestModuleFinder: + def find(self, path, *args, **kwargs): + return set(ModuleFinder.find(str(path), *args, **kwargs)) + + EXAMPLES = { + # circumstance: (files, kwargs, expected_modules) + "simple_folder": ( + ["file.py", "other.py"], + {}, # kwargs + ["file", "other"], + ), + "exclude": ( + ["file.py", "other.py"], + {"exclude": ["f*"]}, + ["other"], + ), + "include": ( + ["file.py", "fole.py", "other.py"], + {"include": ["f*"], "exclude": ["fo*"]}, + ["file"], + ), + "invalid-name": ( + ["my-file.py", "other.file.py"], + {}, + [] + ) + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_finder(self, tmp_path, example): + files, kwargs, expected_modules = self.EXAMPLES[example] + ensure_files(tmp_path, files) + assert self.find(tmp_path, **kwargs) == set(expected_modules) + + @pytest.mark.skipif(not has_symlink(), reason='Symlink support required') + def test_symlinked_packages_are_included(self, tmp_path): + src = "_myfiles/file.py" + ensure_files(tmp_path, [src]) + os.symlink(tmp_path / src, tmp_path / "link.py") + assert self.find(tmp_path) == {"link"} + + +class TestFlatLayoutModuleFinder: + def find(self, path, *args, **kwargs): + return set(FlatLayoutModuleFinder.find(str(path))) + + EXAMPLES = { + # circumstance: (files, expected_modules) + "hidden-files": ( + [".module.py"], + [] + ), + "private-modules": ( + ["_module.py"], + [] + ), + "common-names": ( + ["setup.py", "conftest.py", "test.py", "tests.py", "example.py", "mod.py"], + ["mod"] + ), + "tool-specific": ( + ["tasks.py", "fabfile.py", "noxfile.py", "dodo.py", "manage.py", "mod.py"], + ["mod"] + ) + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_unwanted_files_not_included(self, tmp_path, example): + files, expected_modules = self.EXAMPLES[example] + ensure_files(tmp_path, files) + assert self.find(tmp_path) == set(expected_modules) diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py index b97faf17..0640f49d 100644 --- a/setuptools/tests/test_setuptools.py +++ b/setuptools/tests/test_setuptools.py @@ -303,3 +303,8 @@ 
 
     for member in contents:
         assert '/tests/' not in member
+
+
+def test_convert_path_deprecated():
+    with pytest.warns(setuptools.SetuptoolsDeprecationWarning):
+        setuptools.convert_path('setuptools/tests')
diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py
index 0ba89643..65358543 100644
--- a/setuptools/tests/test_virtualenv.py
+++ b/setuptools/tests/test_virtualenv.py
@@ -1,7 +1,8 @@
 import os
 import sys
-import itertools
 import subprocess
+from urllib.request import urlopen
+from urllib.error import URLError
 
 import pathlib
 
@@ -31,56 +32,39 @@ def test_clean_env_install(venv_without_setuptools, setuptools_wheel):
     venv_without_setuptools.run(cmd)
 
 
-def _get_pip_versions():
-    # This fixture will attempt to detect if tests are being run without
-    # network connectivity and if so skip some tests
-
-    network = True
+def access_pypi():
+    # Detect if tests are being run without connectivity
     if not os.environ.get('NETWORK_REQUIRED', False):  # pragma: nocover
         try:
-            from urllib.request import urlopen
-            from urllib.error import URLError
-        except ImportError:
-            from urllib2 import urlopen, URLError  # Python 2.7 compat
-
-        try:
             urlopen('https://pypi.org', timeout=1)
         except URLError:
             # No network, disable most of these tests
-            network = False
+            return False
 
-    def mark(param, *marks):
-        if not isinstance(param, type(pytest.param(''))):
-            param = pytest.param(param)
-        return param._replace(marks=param.marks + marks)
+    return True
 
-    def skip_network(param):
-        return param if network else mark(param, pytest.mark.skip(reason="no network"))
 
-    network_versions = [
-        mark('pip<20', pytest.mark.xfail(reason='pypa/pip#6599')),
+@pytest.mark.skipif(
+    'platform.python_implementation() == "PyPy"',
+    reason="https://github.com/pypa/setuptools/pull/2865#issuecomment-965834995",
+)
+@pytest.mark.skipif(not access_pypi(), reason="no network")
+# ^-- Even when it is not necessary to install a different version of `pip`
+#     the build process will still try to download `wheel`, see #3147 and #2986.
+@pytest.mark.parametrize(
+    'pip_version',
+    [
+        None,
+        pytest.param('pip<20', marks=pytest.mark.xfail(reason='pypa/pip#6599')),
         'pip<20.1',
         'pip<21',
         'pip<22',
-        mark(
+        pytest.param(
             'https://github.com/pypa/pip/archive/main.zip',
-            pytest.mark.xfail(reason='#2975'),
+            marks=pytest.mark.xfail(reason='#2975'),
         ),
     ]
-
-    versions = itertools.chain(
-        [None],
-        map(skip_network, network_versions)
-    )
-
-    return list(versions)
-
-
-@pytest.mark.skipif(
-    'platform.python_implementation() == "PyPy"',
-    reason="https://github.com/pypa/setuptools/pull/2865#issuecomment-965834995",
 )
-@pytest.mark.parametrize('pip_version', _get_pip_versions())
 def test_pip_upgrade_from_source(pip_version, venv_without_setuptools,
                                  setuptools_wheel, setuptools_sdist):
     """
diff --git a/setuptools/windows_support.py b/setuptools/windows_support.py
index cb977cff..1ca64fbb 100644
--- a/setuptools/windows_support.py
+++ b/setuptools/windows_support.py
@@ -1,5 +1,4 @@
 import platform
-import ctypes
 
 
 def windows_only(func):
@@ -17,6 +16,7 @@ def hide_file(path):
 
     `path` must be text.
     """
+    import ctypes
     __import__('ctypes.wintypes')
     SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
    SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
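The discovery tests above pin down how the new finders in `setuptools.discovery` prune conventional non-package directories and tool-specific module names. A minimal sketch of that behaviour, assuming a setuptools release that ships the `discovery` module (introduced in 61.0.0); the directory layout is illustrative only:

# Sketch only: assumes `setuptools.discovery` is available (setuptools >= 61).
import tempfile
from pathlib import Path

from setuptools.discovery import FlatLayoutModuleFinder, FlatLayoutPackageFinder

with tempfile.TemporaryDirectory() as tmp:
    root = Path(tmp)
    # An illustrative flat layout: one real package plus conventional clutter.
    for name in ("pkg/__init__.py", "docs/conf.py", "tests/test_pkg.py",
                 "tasks.py", "mod.py"):
        target = root / name
        target.parent.mkdir(parents=True, exist_ok=True)
        target.touch()

    # docs/ and tests/ are on the flat-layout exclusion list for packages.
    assert set(FlatLayoutPackageFinder.find(str(root))) == {"pkg"}
    # tasks.py is a tool-specific module name; only mod.py is discovered.
    assert set(FlatLayoutModuleFinder.find(str(root))) == {"mod"}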

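`test_convert_path_deprecated` above locks in the new `SetuptoolsDeprecationWarning` on the re-exported `setuptools.convert_path`. A hedged sketch of how third-party code can keep the same behaviour without the warning, by importing the helper from its original distutils location:

# Sketch: import convert_path from distutils.util instead of setuptools to
# avoid the SetuptoolsDeprecationWarning; the semantics are unchanged.
import os
from distutils.util import convert_path

# convert_path rewrites a '/'-separated ("URL-style") path using the local
# separator, so the result matches os.path.join on every platform.
assert convert_path('pkg/sub/module') == os.path.join('pkg', 'sub', 'module')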