Diffstat (limited to 'setuptools/tests')
37 files changed, 3370 insertions, 342 deletions
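The bulk of this change adds a small download helper package (setuptools/tests/config/downloads/) together with a list of real-world setup.cfg files (setupcfg_examples.txt) that the new config tests fetch on demand. As a rough sketch of how those helpers fit together, the snippet below prefetches every example so the tests can later run offline; it is the programmatic equivalent of the documented "python -m downloads.preload setupcfg_examples.txt" invocation and assumes it is run from the root of a setuptools checkout that already contains the files added in this diff.

    from pathlib import Path

    from setuptools.tests.config.downloads import retrieve_file, urls_from_file

    # File added in this diff; every non-comment line is one setup.cfg URL to fetch.
    examples = Path("setuptools/tests/config/setupcfg_examples.txt")

    for url in urls_from_file(examples):
        # Stores each file under setuptools/tests/config/downloads/ (the default
        # DOWNLOAD_DIR) and skips anything that is already cached there.
        retrieve_file(url)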
diff --git a/setuptools/tests/config/__init__.py b/setuptools/tests/config/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/tests/config/__init__.py diff --git a/setuptools/tests/config/downloads/.gitignore b/setuptools/tests/config/downloads/.gitignore new file mode 100644 index 00000000..df3779fc --- /dev/null +++ b/setuptools/tests/config/downloads/.gitignore @@ -0,0 +1,4 @@ +* +!.gitignore +!__init__.py +!preload.py diff --git a/setuptools/tests/config/downloads/__init__.py b/setuptools/tests/config/downloads/__init__.py new file mode 100644 index 00000000..9fb9b14b --- /dev/null +++ b/setuptools/tests/config/downloads/__init__.py @@ -0,0 +1,57 @@ +import re +import time +from pathlib import Path +from urllib.error import HTTPError +from urllib.request import urlopen + +__all__ = ["DOWNLOAD_DIR", "retrieve_file", "output_file", "urls_from_file"] + + +NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/") +DOWNLOAD_DIR = Path(__file__).parent + + +# ---------------------------------------------------------------------- +# Please update ./preload.py accordingly when modifying this file +# ---------------------------------------------------------------------- + + +def output_file(url: str, download_dir: Path = DOWNLOAD_DIR): + file_name = url.strip() + for part in NAME_REMOVE: + file_name = file_name.replace(part, '').strip().strip('/:').strip() + return Path(download_dir, re.sub(r"[^\-_\.\w\d]+", "_", file_name)) + + +def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5): + path = output_file(url, download_dir) + if path.exists(): + print(f"Skipping {url} (already exists: {path})") + else: + download_dir.mkdir(exist_ok=True, parents=True) + print(f"Downloading {url} to {path}") + try: + download(url, path) + except HTTPError: + time.sleep(wait) # wait a few seconds and try again. + download(url, path) + return path + + +def urls_from_file(list_file: Path): + """``list_file`` should be a text file where each line corresponds to a URL to + download. + """ + print(f"file: {list_file}") + content = list_file.read_text(encoding="utf-8") + return [url for url in content.splitlines() if not url.startswith("#")] + + +def download(url: str, dest: Path): + with urlopen(url) as f: + data = f.read() + + with open(dest, "wb") as f: + f.write(data) + + assert Path(dest).exists() diff --git a/setuptools/tests/config/downloads/preload.py b/setuptools/tests/config/downloads/preload.py new file mode 100644 index 00000000..64b3f1c8 --- /dev/null +++ b/setuptools/tests/config/downloads/preload.py @@ -0,0 +1,18 @@ +"""This file can be used to preload files needed for testing. + +For example you can use:: + + cd setuptools/tests/config + python -m downloads.preload setupcfg_examples.txt + +to make sure the `setup.cfg` examples are downloaded before starting the tests. +""" +import sys +from pathlib import Path + +from . 
import retrieve_file, urls_from_file + + +if __name__ == "__main__": + urls = urls_from_file(Path(sys.argv[1])) + list(map(retrieve_file, urls)) diff --git a/setuptools/tests/config/setupcfg_examples.txt b/setuptools/tests/config/setupcfg_examples.txt new file mode 100644 index 00000000..5db35654 --- /dev/null +++ b/setuptools/tests/config/setupcfg_examples.txt @@ -0,0 +1,23 @@ +# ==================================================================== +# Some popular packages that use setup.cfg (and others not so popular) +# Reference: https://hugovk.github.io/top-pypi-packages/ +# ==================================================================== +https://github.com/pypa/setuptools/raw/52c990172fec37766b3566679724aa8bf70ae06d/setup.cfg +https://github.com/pypa/wheel/raw/0acd203cd896afec7f715aa2ff5980a403459a3b/setup.cfg +https://github.com/python/importlib_metadata/raw/2f05392ca980952a6960d82b2f2d2ea10aa53239/setup.cfg +https://github.com/jaraco/skeleton/raw/d9008b5c510cd6969127a6a2ab6f832edddef296/setup.cfg +https://github.com/jaraco/zipp/raw/700d3a96390e970b6b962823bfea78b4f7e1c537/setup.cfg +https://github.com/pallets/jinja/raw/7d72eb7fefb7dce065193967f31f805180508448/setup.cfg +https://github.com/tkem/cachetools/raw/2fd87a94b8d3861d80e9e4236cd480bfdd21c90d/setup.cfg +https://github.com/aio-libs/aiohttp/raw/5e0e6b7080f2408d5f1dd544c0e1cf88378b7b10/setup.cfg +https://github.com/pallets/flask/raw/9486b6cf57bd6a8a261f67091aca8ca78eeec1e3/setup.cfg +https://github.com/pallets/click/raw/6411f425fae545f42795665af4162006b36c5e4a/setup.cfg +https://github.com/sqlalchemy/sqlalchemy/raw/533f5718904b620be8d63f2474229945d6f8ba5d/setup.cfg +https://github.com/pytest-dev/pluggy/raw/461ef63291d13589c4e21aa182cd1529257e9a0a/setup.cfg +https://github.com/pytest-dev/pytest/raw/c7be96dae487edbd2f55b561b31b68afac1dabe6/setup.cfg +https://github.com/tqdm/tqdm/raw/fc69d5dcf578f7c7986fa76841a6b793f813df35/setup.cfg +https://github.com/platformdirs/platformdirs/raw/7b7852128dd6f07511b618d6edea35046bd0c6ff/setup.cfg +https://github.com/pandas-dev/pandas/raw/bc17343f934a33dc231c8c74be95d8365537c376/setup.cfg +https://github.com/django/django/raw/4e249d11a6e56ca8feb4b055b681cec457ef3a3d/setup.cfg +https://github.com/pyscaffold/pyscaffold/raw/de7aa5dc059fbd04307419c667cc4961bc9df4b8/setup.cfg +https://github.com/pypa/virtualenv/raw/f92eda6e3da26a4d28c2663ffb85c4960bdb990c/setup.cfg diff --git a/setuptools/tests/config/test_apply_pyprojecttoml.py b/setuptools/tests/config/test_apply_pyprojecttoml.py new file mode 100644 index 00000000..4f541697 --- /dev/null +++ b/setuptools/tests/config/test_apply_pyprojecttoml.py @@ -0,0 +1,323 @@ +"""Make sure that applying the configuration from pyproject.toml is equivalent to +applying a similar configuration from setup.cfg + +To run these tests offline, please have a look on ``./downloads/preload.py`` +""" +import io +import re +import tarfile +from pathlib import Path +from unittest.mock import Mock +from zipfile import ZipFile + +import pytest +from ini2toml.api import Translator + +import setuptools # noqa ensure monkey patch to metadata +from setuptools.dist import Distribution +from setuptools.config import setupcfg, pyprojecttoml +from setuptools.config import expand +from setuptools.config._apply_pyprojecttoml import _WouldIgnoreField, _some_attrgetter +from setuptools.command.egg_info import write_requirements + +from .downloads import retrieve_file, urls_from_file + + +HERE = Path(__file__).parent +EXAMPLES_FILE = "setupcfg_examples.txt" + + +def makedist(path, 
**attrs): + return Distribution({"src_root": path, **attrs}) + + +@pytest.mark.parametrize("url", urls_from_file(HERE / EXAMPLES_FILE)) +@pytest.mark.filterwarnings("ignore") +@pytest.mark.uses_network +def test_apply_pyproject_equivalent_to_setupcfg(url, monkeypatch, tmp_path): + monkeypatch.setattr(expand, "read_attr", Mock(return_value="0.0.1")) + setupcfg_example = retrieve_file(url) + pyproject_example = Path(tmp_path, "pyproject.toml") + toml_config = Translator().translate(setupcfg_example.read_text(), "setup.cfg") + pyproject_example.write_text(toml_config) + + dist_toml = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject_example) + dist_cfg = setupcfg.apply_configuration(makedist(tmp_path), setupcfg_example) + + pkg_info_toml = core_metadata(dist_toml) + pkg_info_cfg = core_metadata(dist_cfg) + assert pkg_info_toml == pkg_info_cfg + + if any(getattr(d, "license_files", None) for d in (dist_toml, dist_cfg)): + assert set(dist_toml.license_files) == set(dist_cfg.license_files) + + if any(getattr(d, "entry_points", None) for d in (dist_toml, dist_cfg)): + print(dist_cfg.entry_points) + ep_toml = {(k, *sorted(i.replace(" ", "") for i in v)) + for k, v in dist_toml.entry_points.items()} + ep_cfg = {(k, *sorted(i.replace(" ", "") for i in v)) + for k, v in dist_cfg.entry_points.items()} + assert ep_toml == ep_cfg + + if any(getattr(d, "package_data", None) for d in (dist_toml, dist_cfg)): + pkg_data_toml = {(k, *sorted(v)) for k, v in dist_toml.package_data.items()} + pkg_data_cfg = {(k, *sorted(v)) for k, v in dist_cfg.package_data.items()} + assert pkg_data_toml == pkg_data_cfg + + if any(getattr(d, "data_files", None) for d in (dist_toml, dist_cfg)): + data_files_toml = {(k, *sorted(v)) for k, v in dist_toml.data_files} + data_files_cfg = {(k, *sorted(v)) for k, v in dist_cfg.data_files} + assert data_files_toml == data_files_cfg + + assert set(dist_toml.install_requires) == set(dist_cfg.install_requires) + if any(getattr(d, "extras_require", None) for d in (dist_toml, dist_cfg)): + if ( + "testing" in dist_toml.extras_require + and "testing" not in dist_cfg.extras_require + ): + # ini2toml can automatically convert `tests_require` to `testing` extra + dist_toml.extras_require.pop("testing") + extra_req_toml = {(k, *sorted(v)) for k, v in dist_toml.extras_require.items()} + extra_req_cfg = {(k, *sorted(v)) for k, v in dist_cfg.extras_require.items()} + assert extra_req_toml == extra_req_cfg + + +PEP621_EXAMPLE = """\ +[project] +name = "spam" +version = "2020.0.0" +description = "Lovely Spam! Wonderful Spam!" 
+readme = "README.rst" +requires-python = ">=3.8" +license = {file = "LICENSE.txt"} +keywords = ["egg", "bacon", "sausage", "tomatoes", "Lobster Thermidor"] +authors = [ + {email = "hi@pradyunsg.me"}, + {name = "Tzu-Ping Chung"} +] +maintainers = [ + {name = "Brett Cannon", email = "brett@python.org"} +] +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python" +] + +dependencies = [ + "httpx", + "gidgethub[httpx]>4.0.0", + "django>2.1; os_name != 'nt'", + "django>2.0; os_name == 'nt'" +] + +[project.optional-dependencies] +test = [ + "pytest < 5.0.0", + "pytest-cov[all]" +] + +[project.urls] +homepage = "http://example.com" +documentation = "http://readthedocs.org" +repository = "http://github.com" +changelog = "http://github.com/me/spam/blob/master/CHANGELOG.md" + +[project.scripts] +spam-cli = "spam:main_cli" + +[project.gui-scripts] +spam-gui = "spam:main_gui" + +[project.entry-points."spam.magical"] +tomatoes = "spam:main_tomatoes" +""" + +PEP621_EXAMPLE_SCRIPT = """ +def main_cli(): pass +def main_gui(): pass +def main_tomatoes(): pass +""" + + +def _pep621_example_project(tmp_path, readme="README.rst"): + pyproject = tmp_path / "pyproject.toml" + text = PEP621_EXAMPLE + replacements = {'readme = "README.rst"': f'readme = "{readme}"'} + for orig, subst in replacements.items(): + text = text.replace(orig, subst) + pyproject.write_text(text) + + (tmp_path / readme).write_text("hello world") + (tmp_path / "LICENSE.txt").write_text("--- LICENSE stub ---") + (tmp_path / "spam.py").write_text(PEP621_EXAMPLE_SCRIPT) + return pyproject + + +def test_pep621_example(tmp_path): + """Make sure the example in PEP 621 works""" + pyproject = _pep621_example_project(tmp_path) + dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + assert dist.metadata.license == "--- LICENSE stub ---" + assert set(dist.metadata.license_files) == {"LICENSE.txt"} + + +@pytest.mark.parametrize( + "readme, ctype", + [ + ("Readme.txt", "text/plain"), + ("readme.md", "text/markdown"), + ("text.rst", "text/x-rst"), + ] +) +def test_readme_content_type(tmp_path, readme, ctype): + pyproject = _pep621_example_project(tmp_path, readme) + dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + assert dist.metadata.long_description_content_type == ctype + + +def test_undefined_content_type(tmp_path): + pyproject = _pep621_example_project(tmp_path, "README.tex") + with pytest.raises(ValueError, match="Undefined content type for README.tex"): + pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + + +def test_no_explicit_content_type_for_missing_extension(tmp_path): + pyproject = _pep621_example_project(tmp_path, "README") + dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + assert dist.metadata.long_description_content_type is None + + +# TODO: After PEP 639 is accepted, we have to move the license-files +# to the `project` table instead of `tool.setuptools` +def test_license_and_license_files(tmp_path): + pyproject = _pep621_example_project(tmp_path, "README") + text = pyproject.read_text(encoding="utf-8") + + # Sanity-check + assert 'license = {file = "LICENSE.txt"}' in text + assert "[tool.setuptools]" not in text + + text += '\n[tool.setuptools]\nlicense-files = ["_FILE*"]\n' + pyproject.write_text(text, encoding="utf-8") + (tmp_path / "_FILE.txt").touch() + (tmp_path / "_FILE.rst").touch() + + # Would normally match the `license_files` glob patterns, but we want to exclude it + # by being explicit. 
On the other hand, its contents should be added to `license` + (tmp_path / "LICENSE.txt").write_text("LicenseRef-Proprietary\n", encoding="utf-8") + + dist = pyprojecttoml.apply_configuration(makedist(tmp_path), pyproject) + assert set(dist.metadata.license_files) == {"_FILE.rst", "_FILE.txt"} + assert dist.metadata.license == "LicenseRef-Proprietary\n" + + +class TestPresetField: + def pyproject(self, tmp_path, dynamic, extra_content=""): + content = f"[project]\nname = 'proj'\ndynamic = {dynamic!r}\n" + if "version" not in dynamic: + content += "version = '42'\n" + file = tmp_path / "pyproject.toml" + file.write_text(content + extra_content, encoding="utf-8") + return file + + @pytest.mark.parametrize( + "attr, field, value", + [ + ("install_requires", "dependencies", ["six"]), + ("classifiers", "classifiers", ["Private :: Classifier"]), + ] + ) + def test_not_listed_in_dynamic(self, tmp_path, attr, field, value): + """For the time being we just warn if the user pre-set values (e.g. via + ``setup.py``) but do not include them in ``dynamic``. + """ + pyproject = self.pyproject(tmp_path, []) + dist = makedist(tmp_path, **{attr: value}) + msg = re.compile(f"defined outside of `pyproject.toml`:.*{field}", re.S) + with pytest.warns(_WouldIgnoreField, match=msg): + dist = pyprojecttoml.apply_configuration(dist, pyproject) + + # TODO: Once support for pyproject.toml config stabilizes attr should be None + dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist) + assert dist_value == value + + @pytest.mark.parametrize( + "attr, field, value", + [ + ("install_requires", "dependencies", []), + ("extras_require", "optional-dependencies", {}), + ("install_requires", "dependencies", ["six"]), + ("classifiers", "classifiers", ["Private :: Classifier"]), + ] + ) + def test_listed_in_dynamic(self, tmp_path, attr, field, value): + pyproject = self.pyproject(tmp_path, [field]) + dist = makedist(tmp_path, **{attr: value}) + dist = pyprojecttoml.apply_configuration(dist, pyproject) + dist_value = _some_attrgetter(f"metadata.{attr}", attr)(dist) + assert dist_value == value + + def test_optional_dependencies_dont_remove_env_markers(self, tmp_path): + """ + Internally setuptools converts dependencies with markers to "extras". + If ``install_requires`` is given by ``setup.py``, we have to ensure that + applying ``optional-dependencies`` does not overwrite the mandatory + dependencies with markers (see #3204). 
+ """ + # If setuptools replace its internal mechanism that uses `requires.txt` + # this test has to be rewritten to adapt accordingly + extra = "\n[project.optional-dependencies]\nfoo = ['bar>1']\n" + pyproject = self.pyproject(tmp_path, ["dependencies"], extra) + install_req = ['importlib-resources (>=3.0.0) ; python_version < "3.7"'] + dist = makedist(tmp_path, install_requires=install_req) + dist = pyprojecttoml.apply_configuration(dist, pyproject) + assert "foo" in dist.extras_require + assert ':python_version < "3.7"' in dist.extras_require + egg_info = dist.get_command_obj("egg_info") + write_requirements(egg_info, tmp_path, tmp_path / "requires.txt") + reqs = (tmp_path / "requires.txt").read_text(encoding="utf-8") + assert "importlib-resources" in reqs + assert "bar" in reqs + + +class TestMeta: + def test_example_file_in_sdist(self, setuptools_sdist): + """Meta test to ensure tests can run from sdist""" + with tarfile.open(setuptools_sdist) as tar: + assert any(name.endswith(EXAMPLES_FILE) for name in tar.getnames()) + + def test_example_file_not_in_wheel(self, setuptools_wheel): + """Meta test to ensure auxiliary test files are not in wheel""" + with ZipFile(setuptools_wheel) as zipfile: + assert not any(name.endswith(EXAMPLES_FILE) for name in zipfile.namelist()) + + +# --- Auxiliary Functions --- + + +def core_metadata(dist) -> str: + with io.StringIO() as buffer: + dist.metadata.write_pkg_file(buffer) + pkg_file_txt = buffer.getvalue() + + skip_prefixes = () + skip_lines = set() + # ---- DIFF NORMALISATION ---- + # PEP 621 is very particular about author/maintainer metadata conversion, so skip + skip_prefixes += ("Author:", "Author-email:", "Maintainer:", "Maintainer-email:") + # May be redundant with Home-page + skip_prefixes += ("Project-URL: Homepage,", "Home-page:") + # May be missing in original (relying on default) but backfilled in the TOML + skip_prefixes += ("Description-Content-Type:",) + # ini2toml can automatically convert `tests_require` to `testing` extra + skip_lines.add("Provides-Extra: testing") + # Remove empty lines + skip_lines.add("") + + result = [] + for line in pkg_file_txt.splitlines(): + if line.startswith(skip_prefixes) or line in skip_lines: + continue + result.append(line + "\n") + + return "".join(result) diff --git a/setuptools/tests/config/test_expand.py b/setuptools/tests/config/test_expand.py new file mode 100644 index 00000000..15053c8f --- /dev/null +++ b/setuptools/tests/config/test_expand.py @@ -0,0 +1,185 @@ +import os + +import pytest + +from distutils.errors import DistutilsOptionError +from setuptools.config import expand +from setuptools.discovery import find_package_path + + +def write_files(files, root_dir): + for file, content in files.items(): + path = root_dir / file + path.parent.mkdir(exist_ok=True, parents=True) + path.write_text(content) + + +def test_glob_relative(tmp_path, monkeypatch): + files = { + "dir1/dir2/dir3/file1.txt", + "dir1/dir2/file2.txt", + "dir1/file3.txt", + "a.ini", + "b.ini", + "dir1/c.ini", + "dir1/dir2/a.ini", + } + + write_files({k: "" for k in files}, tmp_path) + patterns = ["**/*.txt", "[ab].*", "**/[ac].ini"] + monkeypatch.chdir(tmp_path) + assert set(expand.glob_relative(patterns)) == files + # Make sure the same APIs work outside cwd + assert set(expand.glob_relative(patterns, tmp_path)) == files + + +def test_read_files(tmp_path, monkeypatch): + + dir_ = tmp_path / "dir_" + (tmp_path / "_dir").mkdir(exist_ok=True) + (tmp_path / "a.txt").touch() + files = { + "a.txt": "a", + "dir1/b.txt": "b", + 
"dir1/dir2/c.txt": "c" + } + write_files(files, dir_) + + with monkeypatch.context() as m: + m.chdir(dir_) + assert expand.read_files(list(files)) == "a\nb\nc" + + cannot_access_msg = r"Cannot access '.*\.\..a\.txt'" + with pytest.raises(DistutilsOptionError, match=cannot_access_msg): + expand.read_files(["../a.txt"]) + + # Make sure the same APIs work outside cwd + assert expand.read_files(list(files), dir_) == "a\nb\nc" + with pytest.raises(DistutilsOptionError, match=cannot_access_msg): + expand.read_files(["../a.txt"], dir_) + + +class TestReadAttr: + def test_read_attr(self, tmp_path, monkeypatch): + files = { + "pkg/__init__.py": "", + "pkg/sub/__init__.py": "VERSION = '0.1.1'", + "pkg/sub/mod.py": ( + "VALUES = {'a': 0, 'b': {42}, 'c': (0, 1, 1)}\n" + "raise SystemExit(1)" + ), + } + write_files(files, tmp_path) + + with monkeypatch.context() as m: + m.chdir(tmp_path) + # Make sure it can read the attr statically without evaluating the module + assert expand.read_attr('pkg.sub.VERSION') == '0.1.1' + values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}) + + assert values['a'] == 0 + assert values['b'] == {42} + + # Make sure the same APIs work outside cwd + assert expand.read_attr('pkg.sub.VERSION', root_dir=tmp_path) == '0.1.1' + values = expand.read_attr('lib.mod.VALUES', {'lib': 'pkg/sub'}, tmp_path) + assert values['c'] == (0, 1, 1) + + def test_import_order(self, tmp_path): + """ + Sometimes the import machinery will import the parent package of a nested + module, which triggers side-effects and might create problems (see issue #3176) + + ``read_attr`` should bypass these limitations by resolving modules statically + (via ast.literal_eval). + """ + files = { + "src/pkg/__init__.py": "from .main import func\nfrom .about import version", + "src/pkg/main.py": "import super_complicated_dep\ndef func(): return 42", + "src/pkg/about.py": "version = '42'", + } + write_files(files, tmp_path) + attr_desc = "pkg.about.version" + package_dir = {"": "src"} + # `import super_complicated_dep` should not run, otherwise the build fails + assert expand.read_attr(attr_desc, package_dir, tmp_path) == "42" + + +@pytest.mark.parametrize( + 'package_dir, file, module, return_value', + [ + ({"": "src"}, "src/pkg/main.py", "pkg.main", 42), + ({"pkg": "lib"}, "lib/main.py", "pkg.main", 13), + ({}, "single_module.py", "single_module", 70), + ({}, "flat_layout/pkg.py", "flat_layout.pkg", 836), + ] +) +def test_resolve_class(tmp_path, package_dir, file, module, return_value): + files = {file: f"class Custom:\n def testing(self): return {return_value}"} + write_files(files, tmp_path) + cls = expand.resolve_class(f"{module}.Custom", package_dir, tmp_path) + assert cls().testing() == return_value + + +@pytest.mark.parametrize( + 'args, pkgs', + [ + ({"where": ["."], "namespaces": False}, {"pkg", "other"}), + ({"where": [".", "dir1"], "namespaces": False}, {"pkg", "other", "dir2"}), + ({"namespaces": True}, {"pkg", "other", "dir1", "dir1.dir2"}), + ({}, {"pkg", "other", "dir1", "dir1.dir2"}), # default value for `namespaces` + ] +) +def test_find_packages(tmp_path, args, pkgs): + files = { + "pkg/__init__.py", + "other/__init__.py", + "dir1/dir2/__init__.py", + } + write_files({k: "" for k in files}, tmp_path) + + package_dir = {} + kwargs = {"root_dir": tmp_path, "fill_package_dir": package_dir, **args} + where = kwargs.get("where", ["."]) + assert set(expand.find_packages(**kwargs)) == pkgs + for pkg in pkgs: + pkg_path = find_package_path(pkg, package_dir, tmp_path) + assert 
os.path.exists(pkg_path) + + # Make sure the same APIs work outside cwd + where = [ + str((tmp_path / p).resolve()).replace(os.sep, "/") # ensure posix-style paths + for p in args.pop("where", ["."]) + ] + + assert set(expand.find_packages(where=where, **args)) == pkgs + + +@pytest.mark.parametrize( + "files, where, expected_package_dir", + [ + (["pkg1/__init__.py", "pkg1/other.py"], ["."], {}), + (["pkg1/__init__.py", "pkg2/__init__.py"], ["."], {}), + (["src/pkg1/__init__.py", "src/pkg1/other.py"], ["src"], {"": "src"}), + (["src/pkg1/__init__.py", "src/pkg2/__init__.py"], ["src"], {"": "src"}), + ( + ["src1/pkg1/__init__.py", "src2/pkg2/__init__.py"], + ["src1", "src2"], + {"pkg1": "src1/pkg1", "pkg2": "src2/pkg2"}, + ), + ( + ["src/pkg1/__init__.py", "pkg2/__init__.py"], + ["src", "."], + {"pkg1": "src/pkg1"}, + ), + ], +) +def test_fill_package_dir(tmp_path, files, where, expected_package_dir): + write_files({k: "" for k in files}, tmp_path) + pkg_dir = {} + kwargs = {"root_dir": tmp_path, "fill_package_dir": pkg_dir, "namespaces": False} + pkgs = expand.find_packages(where=where, **kwargs) + assert set(pkg_dir.items()) == set(expected_package_dir.items()) + for pkg in pkgs: + pkg_path = find_package_path(pkg, pkg_dir, tmp_path) + assert os.path.exists(pkg_path) diff --git a/setuptools/tests/config/test_pyprojecttoml.py b/setuptools/tests/config/test_pyprojecttoml.py new file mode 100644 index 00000000..200312b5 --- /dev/null +++ b/setuptools/tests/config/test_pyprojecttoml.py @@ -0,0 +1,415 @@ +import logging +import re +from configparser import ConfigParser +from inspect import cleandoc + +import pytest +import tomli_w +from path import Path as _Path + +from setuptools.config._apply_pyprojecttoml import _WouldIgnoreField +from setuptools.config.pyprojecttoml import ( + read_configuration, + expand_configuration, + apply_configuration, + validate, + _InvalidFile, +) +from setuptools.dist import Distribution +from setuptools.errors import OptionError + + +import setuptools # noqa -- force distutils.core to be patched +import distutils.core + +EXAMPLE = """ +[project] +name = "myproj" +keywords = ["some", "key", "words"] +dynamic = ["version", "readme"] +requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +dependencies = [ + 'importlib-metadata>=0.12;python_version<"3.8"', + 'importlib-resources>=1.0;python_version<"3.7"', + 'pathlib2>=2.3.3,<3;python_version < "3.4" and sys.platform != "win32"', +] + +[project.optional-dependencies] +docs = [ + "sphinx>=3", + "sphinx-argparse>=0.2.5", + "sphinx-rtd-theme>=0.4.3", +] +testing = [ + "pytest>=1", + "coverage>=3,<5", +] + +[project.scripts] +exec = "pkg.__main__:exec" + +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.setuptools] +package-dir = {"" = "src"} +zip-safe = true +platforms = ["any"] + +[tool.setuptools.packages.find] +where = ["src"] + +[tool.setuptools.cmdclass] +sdist = "pkg.mod.CustomSdist" + +[tool.setuptools.dynamic.version] +attr = "pkg.__version__.VERSION" + +[tool.setuptools.dynamic.readme] +file = ["README.md"] +content-type = "text/markdown" + +[tool.setuptools.package-data] +"*" = ["*.txt"] + +[tool.setuptools.data-files] +"data" = ["_files/*.txt"] + +[tool.distutils.sdist] +formats = "gztar" + +[tool.distutils.bdist_wheel] +universal = true +""" + + +def create_example(path, pkg_root): + pyproject = path / "pyproject.toml" + + files = [ + f"{pkg_root}/pkg/__init__.py", + "_files/file.txt", + ] + if pkg_root != ".": # flat-layout will raise 
error for multi-package dist + # Ensure namespaces are discovered + files.append(f"{pkg_root}/other/nested/__init__.py") + + for file in files: + (path / file).parent.mkdir(exist_ok=True, parents=True) + (path / file).touch() + + pyproject.write_text(EXAMPLE) + (path / "README.md").write_text("hello world") + (path / f"{pkg_root}/pkg/mod.py").write_text("class CustomSdist: pass") + (path / f"{pkg_root}/pkg/__version__.py").write_text("VERSION = (3, 10)") + (path / f"{pkg_root}/pkg/__main__.py").write_text("def exec(): print('hello')") + + +def verify_example(config, path, pkg_root): + pyproject = path / "pyproject.toml" + pyproject.write_text(tomli_w.dumps(config), encoding="utf-8") + expanded = expand_configuration(config, path) + expanded_project = expanded["project"] + assert read_configuration(pyproject, expand=True) == expanded + assert expanded_project["version"] == "3.10" + assert expanded_project["readme"]["text"] == "hello world" + assert "packages" in expanded["tool"]["setuptools"] + if pkg_root == ".": + # Auto-discovery will raise error for multi-package dist + assert set(expanded["tool"]["setuptools"]["packages"]) == {"pkg"} + else: + assert set(expanded["tool"]["setuptools"]["packages"]) == { + "pkg", + "other", + "other.nested", + } + assert expanded["tool"]["setuptools"]["include-package-data"] is True + assert "" in expanded["tool"]["setuptools"]["package-data"] + assert "*" not in expanded["tool"]["setuptools"]["package-data"] + assert expanded["tool"]["setuptools"]["data-files"] == [ + ("data", ["_files/file.txt"]) + ] + + +def test_read_configuration(tmp_path): + create_example(tmp_path, "src") + pyproject = tmp_path / "pyproject.toml" + + config = read_configuration(pyproject, expand=False) + assert config["project"].get("version") is None + assert config["project"].get("readme") is None + + verify_example(config, tmp_path, "src") + + +@pytest.mark.parametrize( + "pkg_root, opts", + [ + (".", {}), + ("src", {}), + ("lib", {"packages": {"find": {"where": ["lib"]}}}), + ], +) +def test_discovered_package_dir_with_attr_directive_in_config(tmp_path, pkg_root, opts): + create_example(tmp_path, pkg_root) + + pyproject = tmp_path / "pyproject.toml" + + config = read_configuration(pyproject, expand=False) + assert config["project"].get("version") is None + assert config["project"].get("readme") is None + config["tool"]["setuptools"].pop("packages", None) + config["tool"]["setuptools"].pop("package-dir", None) + + config["tool"]["setuptools"].update(opts) + verify_example(config, tmp_path, pkg_root) + + +ENTRY_POINTS = { + "console_scripts": {"a": "mod.a:func"}, + "gui_scripts": {"b": "mod.b:func"}, + "other": {"c": "mod.c:func [extra]"}, +} + + +class TestEntryPoints: + def write_entry_points(self, tmp_path): + entry_points = ConfigParser() + entry_points.read_dict(ENTRY_POINTS) + with open(tmp_path / "entry-points.txt", "w") as f: + entry_points.write(f) + + def pyproject(self, dynamic=None): + project = {"dynamic": dynamic or ["scripts", "gui-scripts", "entry-points"]} + tool = {"dynamic": {"entry-points": {"file": "entry-points.txt"}}} + return {"project": project, "tool": {"setuptools": tool}} + + def test_all_listed_in_dynamic(self, tmp_path): + self.write_entry_points(tmp_path) + expanded = expand_configuration(self.pyproject(), tmp_path) + expanded_project = expanded["project"] + assert len(expanded_project["scripts"]) == 1 + assert expanded_project["scripts"]["a"] == "mod.a:func" + assert len(expanded_project["gui-scripts"]) == 1 + assert 
expanded_project["gui-scripts"]["b"] == "mod.b:func" + assert len(expanded_project["entry-points"]) == 1 + assert expanded_project["entry-points"]["other"]["c"] == "mod.c:func [extra]" + + @pytest.mark.parametrize("missing_dynamic", ("scripts", "gui-scripts")) + def test_scripts_not_listed_in_dynamic(self, tmp_path, missing_dynamic): + self.write_entry_points(tmp_path) + dynamic = {"scripts", "gui-scripts", "entry-points"} - {missing_dynamic} + + msg = f"defined outside of `pyproject.toml`:.*{missing_dynamic}" + with pytest.warns(_WouldIgnoreField, match=re.compile(msg, re.S)): + expanded = expand_configuration(self.pyproject(dynamic), tmp_path) + + expanded_project = expanded["project"] + assert dynamic < set(expanded_project) + assert len(expanded_project["entry-points"]) == 1 + # TODO: Test the following when pyproject.toml support stabilizes: + # >>> assert missing_dynamic not in expanded_project + + +class TestClassifiers: + def test_dynamic(self, tmp_path): + # Let's create a project example that has dynamic classifiers + # coming from a txt file. + create_example(tmp_path, "src") + classifiers = """\ + Framework :: Flask + Programming Language :: Haskell + """ + (tmp_path / "classifiers.txt").write_text(cleandoc(classifiers)) + + pyproject = tmp_path / "pyproject.toml" + config = read_configuration(pyproject, expand=False) + dynamic = config["project"]["dynamic"] + config["project"]["dynamic"] = list({*dynamic, "classifiers"}) + dynamic_config = config["tool"]["setuptools"]["dynamic"] + dynamic_config["classifiers"] = {"file": "classifiers.txt"} + + # When the configuration is expanded, + # each line of the file should be an different classifier. + validate(config, pyproject) + expanded = expand_configuration(config, tmp_path) + + assert set(expanded["project"]["classifiers"]) == { + "Framework :: Flask", + "Programming Language :: Haskell", + } + + def test_dynamic_without_config(self, tmp_path): + config = """ + [project] + name = "myproj" + version = '42' + dynamic = ["classifiers"] + """ + + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(cleandoc(config)) + with pytest.raises(OptionError, match="No configuration .* .classifiers."): + read_configuration(pyproject) + + def test_dynamic_readme_from_setup_script_args(self, tmp_path): + config = """ + [project] + name = "myproj" + version = '42' + dynamic = ["readme"] + """ + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(cleandoc(config)) + dist = Distribution(attrs={"long_description": "42"}) + # No error should occur because of missing `readme` + dist = apply_configuration(dist, pyproject) + assert dist.metadata.long_description == "42" + + def test_dynamic_without_file(self, tmp_path): + config = """ + [project] + name = "myproj" + version = '42' + dynamic = ["classifiers"] + + [tool.setuptools.dynamic] + classifiers = {file = ["classifiers.txt"]} + """ + + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(cleandoc(config)) + with pytest.warns(UserWarning, match="File .*classifiers.txt. 
cannot be found"): + expanded = read_configuration(pyproject) + assert "classifiers" not in expanded["project"] + + +@pytest.mark.parametrize( + "example", + ( + """ + [project] + name = "myproj" + version = "1.2" + + [my-tool.that-disrespect.pep518] + value = 42 + """, + ), +) +def test_ignore_unrelated_config(tmp_path, example): + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(cleandoc(example)) + + # Make sure no error is raised due to 3rd party configs in pyproject.toml + assert read_configuration(pyproject) is not None + + +@pytest.mark.parametrize( + "example, error_msg, value_shown_in_debug", + [ + ( + """ + [project] + name = "myproj" + version = "1.2" + requires = ['pywin32; platform_system=="Windows"' ] + """, + "configuration error: `project` must not contain {'requires'} properties", + '"requires": ["pywin32; platform_system==\\"Windows\\""]', + ), + ], +) +def test_invalid_example(tmp_path, caplog, example, error_msg, value_shown_in_debug): + caplog.set_level(logging.DEBUG) + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(cleandoc(example)) + + caplog.clear() + with pytest.raises(ValueError, match="invalid pyproject.toml"): + read_configuration(pyproject) + + # Make sure the logs give guidance to the user + error_log = caplog.record_tuples[0] + assert error_log[1] == logging.ERROR + assert error_msg in error_log[2] + + debug_log = caplog.record_tuples[1] + assert debug_log[1] == logging.DEBUG + debug_msg = "".join(line.strip() for line in debug_log[2].splitlines()) + assert value_shown_in_debug in debug_msg + + +@pytest.mark.parametrize("config", ("", "[tool.something]\nvalue = 42")) +def test_empty(tmp_path, config): + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(config) + + # Make sure no error is raised + assert read_configuration(pyproject) == {} + + +@pytest.mark.parametrize("config", ("[project]\nname = 'myproj'\nversion='42'\n",)) +def test_include_package_data_by_default(tmp_path, config): + """Builds with ``pyproject.toml`` should consider ``include-package-data=True`` as + default. + """ + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(config) + + config = read_configuration(pyproject) + assert config["tool"]["setuptools"]["include-package-data"] is True + + +def test_include_package_data_in_setuppy(tmp_path): + """Builds with ``pyproject.toml`` should consider ``include_package_data`` set in + ``setup.py``. 
+ + See https://github.com/pypa/setuptools/issues/3197#issuecomment-1079023889 + """ + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text("[project]\nname = 'myproj'\nversion='42'\n") + setuppy = tmp_path / "setup.py" + setuppy.write_text("__import__('setuptools').setup(include_package_data=False)") + + with _Path(tmp_path): + dist = distutils.core.run_setup("setup.py", {}, stop_after="config") + + assert dist.get_name() == "myproj" + assert dist.get_version() == "42" + assert dist.include_package_data is False + + +class TestSkipBadConfig: + @pytest.mark.parametrize( + "setup_attrs", + [ + {"name": "myproj"}, + {"install_requires": ["does-not-exist"]}, + ], + ) + @pytest.mark.parametrize( + "pyproject_content", + [ + "[project]\nrequires-python = '>=3.7'\n", + "[project]\nversion = '42'\nrequires-python = '>=3.7'\n", + "[project]\nname='othername'\nrequires-python = '>=3.7'\n", + ], + ) + def test_popular_config(self, tmp_path, pyproject_content, setup_attrs): + # See pypa/setuptools#3199 and pypa/cibuildwheel#1064 + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text(pyproject_content) + dist = Distribution(attrs=setup_attrs) + + prev_name = dist.get_name() + prev_deps = dist.install_requires + + with pytest.warns(_InvalidFile, match=r"DO NOT include.*\[project\].* table"): + dist = apply_configuration(dist, pyproject) + + assert dist.get_name() != "othername" + assert dist.get_name() == prev_name + assert dist.python_requires is None + assert set(dist.install_requires) == set(prev_deps) diff --git a/setuptools/tests/test_config.py b/setuptools/tests/config/test_setupcfg.py index 005742e4..1f35f836 100644 --- a/setuptools/tests/test_config.py +++ b/setuptools/tests/config/test_setupcfg.py @@ -1,21 +1,20 @@ -import types -import sys - -import contextlib import configparser +import contextlib +import inspect +from pathlib import Path +from unittest.mock import Mock, patch import pytest from distutils.errors import DistutilsOptionError, DistutilsFileError -from mock import patch from setuptools.dist import Distribution, _Distribution -from setuptools.config import ConfigHandler, read_configuration -from distutils.core import Command -from .textwrap import DALS +from setuptools.config.setupcfg import ConfigHandler, read_configuration +from ..textwrap import DALS class ErrConfigHandler(ConfigHandler): """Erroneous handler. 
Fails to implement required methods.""" + section_prefix = "**err**" def make_package_dir(name, base_dir, ns=False): @@ -70,7 +69,7 @@ def get_dist(tmpdir, kwargs_initial=None, parse=True): def test_parsers_implemented(): with pytest.raises(NotImplementedError): - handler = ErrConfigHandler(None, {}) + handler = ErrConfigHandler(None, {}, False, Mock()) handler.parsers @@ -186,9 +185,12 @@ class TestMetadata: def test_file_sandboxed(self, tmpdir): - fake_env(tmpdir, '[metadata]\n' 'long_description = file: ../../README\n') + tmpdir.ensure("README") + project = tmpdir.join('depth1', 'depth2') + project.ensure(dir=True) + fake_env(project, '[metadata]\n' 'long_description = file: ../../README\n') - with get_dist(tmpdir, parse=False) as dist: + with get_dist(project, parse=False) as dist: with pytest.raises(DistutilsOptionError): dist.parse_config_files() # file: out of sandbox @@ -859,22 +861,25 @@ class TestOptions: dist.parse_config_files() def test_cmdclass(self, tmpdir): - class CustomCmd(Command): - pass - - m = types.ModuleType('custom_build', 'test package') - - m.__dict__['CustomCmd'] = CustomCmd - - sys.modules['custom_build'] = m - - fake_env( - tmpdir, - '[options]\n' 'cmdclass =\n' ' customcmd = custom_build.CustomCmd\n', + module_path = Path(tmpdir, "src/custom_build.py") # auto discovery for src + module_path.parent.mkdir(parents=True, exist_ok=True) + module_path.write_text( + "from distutils.core import Command\n" + "class CustomCmd(Command): pass\n" ) + setup_cfg = """ + [options] + cmdclass = + customcmd = custom_build.CustomCmd + """ + fake_env(tmpdir, inspect.cleandoc(setup_cfg)) + with get_dist(tmpdir) as dist: - assert dist.cmdclass == {'customcmd': CustomCmd} + cmdclass = dist.cmdclass['customcmd'] + assert cmdclass.__name__ == "CustomCmd" + assert cmdclass.__module__ == "custom_build" + assert module_path.samefile(inspect.getfile(cmdclass)) saved_dist_init = _Distribution.__init__ diff --git a/setuptools/tests/contexts.py b/setuptools/tests/contexts.py index 51ce8984..58948824 100644 --- a/setuptools/tests/contexts.py +++ b/setuptools/tests/contexts.py @@ -7,6 +7,7 @@ import site import io import pkg_resources +from filelock import FileLock @contextlib.contextmanager @@ -96,3 +97,29 @@ def suppress_exceptions(*excs): yield except excs: pass + + +def multiproc(request): + """ + Return True if running under xdist and multiple + workers are used. 
+ """ + try: + worker_id = request.getfixturevalue('worker_id') + except Exception: + return False + return worker_id != 'master' + + +@contextlib.contextmanager +def session_locked_tmp_dir(request, tmp_path_factory, name): + """Uses a file lock to guarantee only one worker can access a temp dir""" + # get the temp directory shared by all workers + base = tmp_path_factory.getbasetemp() + shared_dir = base.parent if multiproc(request) else base + + locked_dir = shared_dir / name + with FileLock(locked_dir.with_suffix(".lock")): + # ^-- prevent multiple workers to access the directory at once + locked_dir.mkdir(exist_ok=True, parents=True) + yield locked_dir diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py index c0274c33..bcf29601 100644 --- a/setuptools/tests/environment.py +++ b/setuptools/tests/environment.py @@ -1,9 +1,38 @@ import os import sys +import subprocess import unicodedata - from subprocess import Popen as _Popen, PIPE as _PIPE +import jaraco.envs + + +class VirtualEnv(jaraco.envs.VirtualEnv): + name = '.env' + # Some version of PyPy will import distutils on startup, implicitly + # importing setuptools, and thus leading to BackendInvalid errors + # when upgrading Setuptools. Bypass this behavior by avoiding the + # early availability and need to upgrade. + create_opts = ['--no-setuptools'] + + def run(self, cmd, *args, **kwargs): + cmd = [self.exe(cmd[0])] + cmd[1:] + kwargs = {"cwd": self.root, **kwargs} # Allow overriding + # In some environments (eg. downstream distro packaging), where: + # - tox isn't used to run tests and + # - PYTHONPATH is set to point to a specific setuptools codebase and + # - no custom env is explicitly set by a test + # PYTHONPATH will leak into the spawned processes. + # In that case tests look for module in the wrong place (on PYTHONPATH). + # Unless the test sets its own special env, pass a copy of the existing + # environment with removed PYTHONPATH to the subprocesses. + if "env" not in kwargs: + env = dict(os.environ) + if "PYTHONPATH" in env: + del env["PYTHONPATH"] + kwargs["env"] = env + return subprocess.check_output(cmd, *args, **kwargs) + def _which_dirs(cmd): result = set() diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py index a5a172e0..25ab49fd 100644 --- a/setuptools/tests/fixtures.py +++ b/setuptools/tests/fixtures.py @@ -1,11 +1,13 @@ +import os import contextlib import sys -import shutil import subprocess +from pathlib import Path import pytest +import path -from . import contexts +from . import contexts, environment @pytest.fixture @@ -28,22 +30,6 @@ def tmpdir_cwd(tmpdir): yield orig -@pytest.fixture -def tmp_src(request, tmp_path): - """Make a copy of the source dir under `$tmp/src`. - - This fixture is useful whenever it's necessary to run `setup.py` - or `pip install` against the source directory when there's no - control over the number of simultaneous invocations. Such - concurrent runs create and delete directories with the same names - under the target directory and so they influence each other's runs - when they are not being executed sequentially. 
- """ - tmp_src_path = tmp_path / 'src' - shutil.copytree(request.config.rootdir, tmp_src_path) - return tmp_src_path - - @pytest.fixture(autouse=True, scope="session") def workaround_xdist_376(request): """ @@ -72,3 +58,83 @@ def sample_project(tmp_path): except Exception: pytest.skip("Unable to clone sampleproject") return tmp_path / 'sampleproject' + + +# sdist and wheel artifacts should be stable across a round of tests +# so we can build them once per session and use the files as "readonly" + + +@pytest.fixture(scope="session") +def setuptools_sdist(tmp_path_factory, request): + if os.getenv("PRE_BUILT_SETUPTOOLS_SDIST"): + return Path(os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")).resolve() + + with contexts.session_locked_tmp_dir( + request, tmp_path_factory, "sdist_build") as tmp: + dist = next(tmp.glob("*.tar.gz"), None) + if dist: + return dist + + subprocess.check_call([ + sys.executable, "-m", "build", "--sdist", + "--outdir", str(tmp), str(request.config.rootdir) + ]) + return next(tmp.glob("*.tar.gz")) + + +@pytest.fixture(scope="session") +def setuptools_wheel(tmp_path_factory, request): + if os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL"): + return Path(os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")).resolve() + + with contexts.session_locked_tmp_dir( + request, tmp_path_factory, "wheel_build") as tmp: + dist = next(tmp.glob("*.whl"), None) + if dist: + return dist + + subprocess.check_call([ + sys.executable, "-m", "build", "--wheel", + "--outdir", str(tmp) , str(request.config.rootdir) + ]) + return next(tmp.glob("*.whl")) + + +@pytest.fixture +def venv(tmp_path, setuptools_wheel): + """Virtual env with the version of setuptools under test installed""" + env = environment.VirtualEnv() + env.root = path.Path(tmp_path / 'venv') + env.req = str(setuptools_wheel) + # In some environments (eg. downstream distro packaging), + # where tox isn't used to run tests and PYTHONPATH is set to point to + # a specific setuptools codebase, PYTHONPATH will leak into the spawned + # processes. + # env.create() should install the just created setuptools + # wheel, but it doesn't if it finds another existing matching setuptools + # installation present on PYTHONPATH: + # `setuptools is already installed with the same version as the provided + # wheel. Use --force-reinstall to force an installation of the wheel.` + # This prevents leaking PYTHONPATH to the created environment. + with contexts.environment(PYTHONPATH=None): + return env.create() + + +@pytest.fixture +def venv_without_setuptools(tmp_path): + """Virtual env without any version of setuptools installed""" + env = environment.VirtualEnv() + env.root = path.Path(tmp_path / 'venv_without_setuptools') + env.create_opts = ['--no-setuptools'] + env.ensure_env() + return env + + +@pytest.fixture +def bare_venv(tmp_path): + """Virtual env without any common packages installed""" + env = environment.VirtualEnv() + env.root = path.Path(tmp_path / 'bare_venv') + env.create_opts = ['--no-setuptools', '--no-pip', '--no-wheel', '--no-seed'] + env.ensure_env() + return env diff --git a/setuptools/tests/integration/__init__.py b/setuptools/tests/integration/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/setuptools/tests/integration/__init__.py diff --git a/setuptools/tests/integration/helpers.py b/setuptools/tests/integration/helpers.py new file mode 100644 index 00000000..24c02be0 --- /dev/null +++ b/setuptools/tests/integration/helpers.py @@ -0,0 +1,75 @@ +"""Reusable functions and classes for different types of integration tests. 
+ +For example ``Archive`` can be used to check the contents of distribution built +with setuptools, and ``run`` will always try to be as verbose as possible to +facilitate debugging. +""" +import os +import subprocess +import tarfile +from zipfile import ZipFile +from pathlib import Path + + +def run(cmd, env=None): + r = subprocess.run( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + env={**os.environ, **(env or {})} + # ^-- allow overwriting instead of discarding the current env + ) + + out = r.stdout + "\n" + r.stderr + # pytest omits stdout/err by default, if the test fails they help debugging + print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print(f"Command: {cmd}\nreturn code: {r.returncode}\n\n{out}") + + if r.returncode == 0: + return out + raise subprocess.CalledProcessError(r.returncode, cmd, r.stdout, r.stderr) + + +class Archive: + """Compatibility layer for ZipFile/Info and TarFile/Info""" + def __init__(self, filename): + self._filename = filename + if filename.endswith("tar.gz"): + self._obj = tarfile.open(filename, "r:gz") + elif filename.endswith("zip"): + self._obj = ZipFile(filename) + else: + raise ValueError(f"{filename} doesn't seem to be a zip or tar.gz") + + def __iter__(self): + if hasattr(self._obj, "infolist"): + return iter(self._obj.infolist()) + return iter(self._obj) + + def get_name(self, zip_or_tar_info): + if hasattr(zip_or_tar_info, "filename"): + return zip_or_tar_info.filename + return zip_or_tar_info.name + + def get_content(self, zip_or_tar_info): + if hasattr(self._obj, "extractfile"): + content = self._obj.extractfile(zip_or_tar_info) + if content is None: + msg = f"Invalid {zip_or_tar_info.name} in {self._filename}" + raise ValueError(msg) + return str(content.read(), "utf-8") + return str(self._obj.read(zip_or_tar_info), "utf-8") + + +def get_sdist_members(sdist_path): + with tarfile.open(sdist_path, "r:gz") as tar: + files = [Path(f) for f in tar.getnames()] + # remove root folder + relative_files = ("/".join(f.parts[1:]) for f in files) + return {f for f in relative_files if f} + + +def get_wheel_members(wheel_path): + with ZipFile(wheel_path) as zipfile: + return set(zipfile.namelist()) diff --git a/setuptools/tests/integration/test_pip_install_sdist.py b/setuptools/tests/integration/test_pip_install_sdist.py new file mode 100644 index 00000000..9d11047b --- /dev/null +++ b/setuptools/tests/integration/test_pip_install_sdist.py @@ -0,0 +1,219 @@ +"""Integration tests for setuptools that focus on building packages via pip. + +The idea behind these tests is not to exhaustively check all the possible +combinations of packages, operating systems, supporting libraries, etc, but +rather check a limited number of popular packages and how they interact with +the exposed public API. This way if any change in API is introduced, we hope to +identify backward compatibility problems before publishing a release. + +The number of tested packages is purposefully kept small, to minimise duration +and the associated maintenance cost (changes in the way these packages define +their build process may require changes in the tests). 
+""" +import json +import os +import shutil +import sys +from enum import Enum +from glob import glob +from hashlib import md5 +from urllib.request import urlopen + +import pytest +from packaging.requirements import Requirement + +from .helpers import Archive, run + + +pytestmark = pytest.mark.integration + +LATEST, = list(Enum("v", "LATEST")) +"""Default version to be checked""" +# There are positive and negative aspects of checking the latest version of the +# packages. +# The main positive aspect is that the latest version might have already +# removed the use of APIs deprecated in previous releases of setuptools. + + +# Packages to be tested: +# (Please notice the test environment cannot support EVERY library required for +# compiling binary extensions. In Ubuntu/Debian nomenclature, we only assume +# that `build-essential`, `gfortran` and `libopenblas-dev` are installed, +# due to their relevance to the numerical/scientific programming ecosystem) +EXAMPLES = [ + ("pandas", LATEST), # cython + custom build_ext + ("sphinx", LATEST), # custom setup.py + ("pip", LATEST), # just in case... + ("pytest", LATEST), # uses setuptools_scm + ("mypy", LATEST), # custom build_py + ext_modules + + # --- Popular packages: https://hugovk.github.io/top-pypi-packages/ --- + ("botocore", LATEST), + ("kiwisolver", "1.3.2"), # build_ext, version pinned due to setup_requires + ("brotli", LATEST), # not in the list but used by urllib3 + + # When adding packages to this list, make sure they expose a `__version__` + # attribute, or modify the tests below +] + + +# Some packages have "optional" dependencies that modify their build behaviour +# and are not listed in pyproject.toml, others still use `setup_requires` +EXTRA_BUILD_DEPS = { + "sphinx": ("babel>=1.3",), + "kiwisolver": ("cppy>=1.1.0",) +} + + +VIRTUALENV = (sys.executable, "-m", "virtualenv") + + +# By default, pip will try to build packages in isolation (PEP 517), which +# means it will download the previous stable version of setuptools. +# `pip` flags can avoid that (the version of setuptools under test +# should be the one to be used) +SDIST_OPTIONS = ( + "--ignore-installed", + "--no-build-isolation", + # We don't need "--no-binary :all:" since we specify the path to the sdist. + # It also helps with performance, since dependencies can come from wheels. +) +# The downside of `--no-build-isolation` is that pip will not download build +# dependencies. The test script will have to also handle that. + + +@pytest.fixture +def venv_python(tmp_path): + run([*VIRTUALENV, str(tmp_path / ".venv")]) + possible_path = (str(p.parent) for p in tmp_path.glob(".venv/*/python*")) + return shutil.which("python", path=os.pathsep.join(possible_path)) + + +@pytest.fixture(autouse=True) +def _prepare(tmp_path, venv_python, monkeypatch, request): + download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path)) + os.makedirs(download_path, exist_ok=True) + + # Environment vars used for building some of the packages + monkeypatch.setenv("USE_MYPYC", "1") + + def _debug_info(): + # Let's provide the maximum amount of information possible in the case + # it is necessary to debug the tests directly from the CI logs. 
+ print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print("Temporary directory:") + map(print, tmp_path.glob("*")) + print("Virtual environment:") + run([venv_python, "-m", "pip", "freeze"]) + request.addfinalizer(_debug_info) + + +ALREADY_LOADED = ("pytest", "mypy") # loaded by pytest/pytest-enabler + + +@pytest.mark.parametrize('package, version', EXAMPLES) +@pytest.mark.uses_network +def test_install_sdist(package, version, tmp_path, venv_python, setuptools_wheel): + venv_pip = (venv_python, "-m", "pip") + sdist = retrieve_sdist(package, version, tmp_path) + deps = build_deps(package, sdist) + if deps: + print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + print("Dependencies:", deps) + run([*venv_pip, "install", *deps]) + + # Use a virtualenv to simulate PEP 517 isolation + # but install fresh setuptools wheel to ensure the version under development + run([*venv_pip, "install", "-I", setuptools_wheel]) + run([*venv_pip, "install", *SDIST_OPTIONS, sdist]) + + # Execute a simple script to make sure the package was installed correctly + script = f"import {package}; print(getattr({package}, '__version__', 0))" + run([venv_python, "-c", script]) + + +# ---- Helper Functions ---- + + +def retrieve_sdist(package, version, tmp_path): + """Either use cached sdist file or download it from PyPI""" + # `pip download` cannot be used due to + # https://github.com/pypa/pip/issues/1884 + # https://discuss.python.org/t/pep-625-file-name-of-a-source-distribution/4686 + # We have to find the correct distribution file and download it + download_path = os.getenv("DOWNLOAD_PATH", str(tmp_path)) + dist = retrieve_pypi_sdist_metadata(package, version) + + # Remove old files to prevent cache to grow indefinitely + for file in glob(os.path.join(download_path, f"{package}*")): + if dist["filename"] != file: + os.unlink(file) + + dist_file = os.path.join(download_path, dist["filename"]) + if not os.path.exists(dist_file): + download(dist["url"], dist_file, dist["md5_digest"]) + return dist_file + + +def retrieve_pypi_sdist_metadata(package, version): + # https://warehouse.pypa.io/api-reference/json.html + id_ = package if version is LATEST else f"{package}/{version}" + with urlopen(f"https://pypi.org/pypi/{id_}/json") as f: + metadata = json.load(f) + + if metadata["info"]["yanked"]: + raise ValueError(f"Release for {package} {version} was yanked") + + version = metadata["info"]["version"] + release = metadata["releases"][version] + dists = [d for d in release if d["packagetype"] == "sdist"] + if len(dists) == 0: + raise ValueError(f"No sdist found for {package} {version}") + + for dist in dists: + if dist["filename"].endswith(".tar.gz"): + return dist + + # Not all packages are publishing tar.gz + return dist + + +def download(url, dest, md5_digest): + with urlopen(url) as f: + data = f.read() + + assert md5(data).hexdigest() == md5_digest + + with open(dest, "wb") as f: + f.write(data) + + assert os.path.exists(dest) + + +def build_deps(package, sdist_file): + """Find out what are the build dependencies for a package. + + We need to "manually" install them, since pip will not install build + deps with `--no-build-isolation`. 
+ """ + import tomli as toml + + # delay importing, since pytest discovery phase may hit this file from a + # testenv without tomli + + archive = Archive(sdist_file) + pyproject = _read_pyproject(archive) + + info = toml.loads(pyproject) + deps = info.get("build-system", {}).get("requires", []) + deps += EXTRA_BUILD_DEPS.get(package, []) + # Remove setuptools from requirements (and deduplicate) + requirements = {Requirement(d).name: d for d in deps} + return [v for k, v in requirements.items() if k != "setuptools"] + + +def _read_pyproject(archive): + for member in archive: + if os.path.basename(archive.get_name(member)) == "pyproject.toml": + return archive.get_content(member) + return "" diff --git a/setuptools/tests/requirements.txt b/setuptools/tests/requirements.txt deleted file mode 100644 index b2d84a94..00000000 --- a/setuptools/tests/requirements.txt +++ /dev/null @@ -1,14 +0,0 @@ -mock -pytest-flake8 -flake8-2020; python_version>="3.6" -virtualenv>=13.0.0 -pytest-virtualenv>=1.2.7 -pytest>=3.7 -wheel -coverage>=4.5.1 -pytest-cov>=2.5.1 -paver; python_version>="3.6" -futures; python_version=="2.7" -pip>=19.1 # For proper file:// URLs support. -jaraco.envs -sphinx diff --git a/setuptools/tests/test_bdist_deprecations.py b/setuptools/tests/test_bdist_deprecations.py index 28482fd0..1a900c67 100644 --- a/setuptools/tests/test_bdist_deprecations.py +++ b/setuptools/tests/test_bdist_deprecations.py @@ -11,7 +11,7 @@ from setuptools import SetuptoolsDeprecationWarning @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only') @mock.patch('distutils.command.bdist_rpm.bdist_rpm') -def test_bdist_rpm_warning(distutils_cmd): +def test_bdist_rpm_warning(distutils_cmd, tmpdir_cwd): dist = Distribution( dict( script_name='setup.py', diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py index fb5b90b1..67f788cc 100644 --- a/setuptools/tests/test_bdist_egg.py +++ b/setuptools/tests/test_bdist_egg.py @@ -13,7 +13,7 @@ from . 
import contexts SETUP_PY = """\ from setuptools import setup -setup(name='foo', py_modules=['hi']) +setup(py_modules=['hi']) """ @@ -52,7 +52,6 @@ class Test: dist = Distribution(dict( script_name='setup.py', script_args=['bdist_egg', '--exclude-source-files'], - name='foo', py_modules=['hi'], )) with contexts.quiet(): diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index 0f4a1a73..36940e76 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -1,15 +1,32 @@ import os +import sys import shutil +import signal import tarfile import importlib +import contextlib from concurrent import futures import re +from zipfile import ZipFile import pytest from jaraco import path from .textwrap import DALS +SETUP_SCRIPT_STUB = "__import__('setuptools').setup()" + + +TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180")) # in seconds +IS_PYPY = '__pypy__' in sys.builtin_module_names + + +pytestmark = pytest.mark.skipif( + sys.platform == "win32" and IS_PYPY, + reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor " + "is flaky and problematic" +) + class BuildBackendBase: def __init__(self, cwd='.', env={}, backend_name='setuptools.build_meta'): @@ -31,10 +48,27 @@ class BuildBackend(BuildBackendBase): def method(*args, **kw): root = os.path.abspath(self.cwd) caller = BuildBackendCaller(root, self.env, self.backend_name) - return self.pool.submit(caller, name, *args, **kw).result() + pid = None + try: + pid = self.pool.submit(os.getpid).result(TIMEOUT) + return self.pool.submit(caller, name, *args, **kw).result(TIMEOUT) + except futures.TimeoutError: + self.pool.shutdown(wait=False) # doesn't stop already running processes + self._kill(pid) + pytest.xfail(f"Backend did not respond before timeout ({TIMEOUT} s)") + except (futures.process.BrokenProcessPool, MemoryError, OSError): + if IS_PYPY: + pytest.xfail("PyPy frequently fails tests with ProcessPoolExector") + raise return method + def _kill(self, pid): + if pid is None: + return + with contextlib.suppress(ProcessLookupError, OSError): + os.kill(pid, signal.SIGTERM if os.name == "nt" else signal.SIGKILL) + class BuildBackendCaller(BuildBackendBase): def __init__(self, *args, **kwargs): @@ -58,7 +92,7 @@ class BuildBackendCaller(BuildBackendBase): defns = [ - { + { # simple setup.py script 'setup.py': DALS(""" __import__('setuptools').setup( name='foo', @@ -72,7 +106,7 @@ defns = [ print('hello') """), }, - { + { # setup.py that relies on __name__ 'setup.py': DALS(""" assert __name__ == '__main__' __import__('setuptools').setup( @@ -87,7 +121,7 @@ defns = [ print('hello') """), }, - { + { # setup.py script that runs arbitrary code 'setup.py': DALS(""" variable = True def function(): @@ -105,7 +139,30 @@ defns = [ print('hello') """), }, - { + { # setup.py script that constructs temp files to be included in the distribution + 'setup.py': DALS(""" + # Some packages construct files on the fly, include them in the package, + # and immediately remove them after `setup()` (e.g. pybind11==2.9.1). + # Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)` + # to obtain a distribution object first, and then run the distutils + # commands later, because these files will be removed in the meantime. 
+ + with open('world.py', 'w') as f: + f.write('x = 42') + + try: + __import__('setuptools').setup( + name='foo', + version='0.0.0', + py_modules=['world'], + setup_requires=['six'], + ) + finally: + # Some packages will clean temporary files + __import__('os').unlink('world.py') + """), + }, + { # setup.cfg only 'setup.cfg': DALS(""" [metadata] name = foo @@ -120,6 +177,22 @@ defns = [ print('hello') """) }, + { # setup.cfg and setup.py + 'setup.cfg': DALS(""" + [metadata] + name = foo + version = 0.0.0 + + [options] + py_modules=hello + setup_requires=six + """), + 'setup.py': "__import__('setuptools').setup()", + 'hello.py': DALS(""" + def run(): + print('hello') + """) + }, ] @@ -150,7 +223,20 @@ class TestBuildMetaBackend: os.makedirs(dist_dir) wheel_name = build_backend.build_wheel(dist_dir) - assert os.path.isfile(os.path.join(dist_dir, wheel_name)) + wheel_file = os.path.join(dist_dir, wheel_name) + assert os.path.isfile(wheel_file) + + # Temporary files should be removed + assert not os.path.isfile('world.py') + + with ZipFile(wheel_file) as zipfile: + wheel_contents = set(zipfile.namelist()) + + # Each one of the examples have a single module + # that should be included in the distribution + python_scripts = (f for f in wheel_contents if f.endswith('.py')) + modules = [f for f in python_scripts if not f.endswith('setup.py')] + assert len(modules) == 1 @pytest.mark.parametrize('build_type', ('wheel', 'sdist')) def test_build_with_existing_file_present(self, build_type, tmpdir_cwd): @@ -199,6 +285,190 @@ class TestBuildMetaBackend: assert third_result == second_result assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0 + @pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB]) + def test_build_with_pyproject_config(self, tmpdir, setup_script): + files = { + 'pyproject.toml': DALS(""" + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + + [project] + name = "foo" + license = {text = "MIT"} + description = "This is a Python package" + dynamic = ["version", "readme"] + classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers" + ] + urls = {Homepage = "http://github.com"} + dependencies = [ + "appdirs", + ] + + [project.optional-dependencies] + all = [ + "tomli>=1", + "pyscaffold>=4,<5", + 'importlib; python_version == "2.6"', + ] + + [project.scripts] + foo = "foo.cli:main" + + [tool.setuptools] + zip-safe = false + package-dir = {"" = "src"} + packages = {find = {where = ["src"]}} + license-files = ["LICENSE*"] + + [tool.setuptools.dynamic] + version = {attr = "foo.__version__"} + readme = {file = "README.rst"} + + [tool.distutils.sdist] + formats = "gztar" + + [tool.distutils.bdist_wheel] + universal = true + """), + "MANIFEST.in": DALS(""" + global-include *.py *.txt + global-exclude *.py[cod] + """), + "README.rst": "This is a ``README``", + "LICENSE.txt": "---- placeholder MIT license ----", + "src": { + "foo": { + "__init__.py": "__version__ = '0.1'", + "cli.py": "def main(): print('hello world')", + "data.txt": "def main(): print('hello world')", + } + } + } + if setup_script: + files["setup.py"] = setup_script + + build_backend = self.get_build_backend() + with tmpdir.as_cwd(): + path.build(files) + sdist_path = build_backend.build_sdist("temp") + wheel_file = build_backend.build_wheel("temp") + + with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar: + sdist_contents = set(tar.getnames()) + + with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as 
zipfile: + wheel_contents = set(zipfile.namelist()) + metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8") + license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8") + epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8") + + assert sdist_contents - {"foo-0.1/setup.py"} == { + 'foo-0.1', + 'foo-0.1/LICENSE.txt', + 'foo-0.1/MANIFEST.in', + 'foo-0.1/PKG-INFO', + 'foo-0.1/README.rst', + 'foo-0.1/pyproject.toml', + 'foo-0.1/setup.cfg', + 'foo-0.1/src', + 'foo-0.1/src/foo', + 'foo-0.1/src/foo/__init__.py', + 'foo-0.1/src/foo/cli.py', + 'foo-0.1/src/foo/data.txt', + 'foo-0.1/src/foo.egg-info', + 'foo-0.1/src/foo.egg-info/PKG-INFO', + 'foo-0.1/src/foo.egg-info/SOURCES.txt', + 'foo-0.1/src/foo.egg-info/dependency_links.txt', + 'foo-0.1/src/foo.egg-info/entry_points.txt', + 'foo-0.1/src/foo.egg-info/requires.txt', + 'foo-0.1/src/foo.egg-info/top_level.txt', + 'foo-0.1/src/foo.egg-info/not-zip-safe', + } + assert wheel_contents == { + "foo/__init__.py", + "foo/cli.py", + "foo/data.txt", # include_package_data defaults to True + "foo-0.1.dist-info/LICENSE.txt", + "foo-0.1.dist-info/METADATA", + "foo-0.1.dist-info/WHEEL", + "foo-0.1.dist-info/entry_points.txt", + "foo-0.1.dist-info/top_level.txt", + "foo-0.1.dist-info/RECORD", + } + assert license == "---- placeholder MIT license ----" + for line in ( + "Summary: This is a Python package", + "License: MIT", + "Classifier: Intended Audience :: Developers", + "Requires-Dist: appdirs", + "Requires-Dist: tomli (>=1) ; extra == 'all'", + "Requires-Dist: importlib ; (python_version == \"2.6\") and extra == 'all'" + ): + assert line in metadata + + assert metadata.strip().endswith("This is a ``README``") + assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main" + + def test_static_metadata_in_pyproject_config(self, tmpdir): + # Make sure static metadata in pyproject.toml is not overwritten by setup.py + # as required by PEP 621 + files = { + 'pyproject.toml': DALS(""" + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + + [project] + name = "foo" + description = "This is a Python package" + version = "42" + dependencies = ["six"] + """), + 'hello.py': DALS(""" + def run(): + print('hello') + """), + 'setup.py': DALS(""" + __import__('setuptools').setup( + name='bar', + version='13', + ) + """), + } + build_backend = self.get_build_backend() + with tmpdir.as_cwd(): + path.build(files) + sdist_path = build_backend.build_sdist("temp") + wheel_file = build_backend.build_wheel("temp") + + assert (tmpdir / "temp/foo-42.tar.gz").exists() + assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists() + assert not (tmpdir / "temp/bar-13.tar.gz").exists() + assert not (tmpdir / "temp/bar-42.tar.gz").exists() + assert not (tmpdir / "temp/foo-13.tar.gz").exists() + assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists() + assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists() + assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists() + + with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar: + pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8") + members = tar.getnames() + assert "bar-13/PKG-INFO" not in members + + with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile: + metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8") + members = zipfile.namelist() + assert "bar-13.dist-info/METADATA" not in members + + for file in pkg_info, metadata: + for line in ("Name: foo", "Version: 42"): + assert line in 
file + for line in ("Name: bar", "Version: 13"): + assert line not in file + def test_build_sdist(self, build_backend): dist_dir = os.path.abspath('pip-sdist') os.makedirs(dist_dir) @@ -392,6 +662,30 @@ class TestBuildMetaBackend: assert expected == sorted(actual) + def test_setup_requires_with_auto_discovery(self, tmpdir_cwd): + # Make sure patches introduced to retrieve setup_requires don't accidentally + # activate auto-discovery and cause problems due to the incomplete set of + # attributes passed to MinimalDistribution + files = { + 'pyproject.toml': DALS(""" + [project] + name = "proj" + version = "42" + """), + "setup.py": DALS(""" + __import__('setuptools').setup( + setup_requires=["foo"], + py_modules = ["hello", "world"] + ) + """), + 'hello.py': "'hello'", + 'world.py': "'world'", + } + path.build(files) + build_backend = self.get_build_backend() + setup_requires = build_backend.get_requires_for_build_wheel() + assert setup_requires == ["wheel", "foo"] + def test_dont_install_setup_requires(self, tmpdir_cwd): files = { 'setup.py': DALS(""" diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py index 78a31ac4..19c8b780 100644 --- a/setuptools/tests/test_build_py.py +++ b/setuptools/tests/test_build_py.py @@ -18,7 +18,6 @@ def test_directories_in_package_data_glob(tmpdir_cwd): script_name='setup.py', script_args=['build_py'], packages=[''], - name='foo', package_data={'': ['path/*']}, )) os.makedirs('path/subpath') @@ -40,7 +39,6 @@ def test_read_only(tmpdir_cwd): script_args=['build_py'], packages=['pkg'], package_data={'pkg': ['data.dat']}, - name='pkg', )) os.makedirs('pkg') open('pkg/__init__.py', 'w').close() @@ -70,7 +68,6 @@ def test_executable_data(tmpdir_cwd): script_args=['build_py'], packages=['pkg'], package_data={'pkg': ['run-me']}, - name='pkg', )) os.makedirs('pkg') open('pkg/__init__.py', 'w').close() diff --git a/setuptools/tests/test_config_discovery.py b/setuptools/tests/test_config_discovery.py new file mode 100644 index 00000000..fac365f4 --- /dev/null +++ b/setuptools/tests/test_config_discovery.py @@ -0,0 +1,581 @@ +import os +import sys +from configparser import ConfigParser +from itertools import product + +from setuptools.command.sdist import sdist +from setuptools.dist import Distribution +from setuptools.discovery import find_package_path, find_parent_package +from setuptools.errors import PackageDiscoveryError + +import setuptools # noqa -- force distutils.core to be patched +import distutils.core + +import pytest +import jaraco.path +from path import Path as _Path + +from .contexts import quiet +from .integration.helpers import get_sdist_members, get_wheel_members, run +from .textwrap import DALS + + +class TestFindParentPackage: + def test_single_package(self, tmp_path): + # find_parent_package should find a non-namespace parent package + (tmp_path / "src/namespace/pkg/nested").mkdir(exist_ok=True, parents=True) + (tmp_path / "src/namespace/pkg/nested/__init__.py").touch() + (tmp_path / "src/namespace/pkg/__init__.py").touch() + packages = ["namespace", "namespace.pkg", "namespace.pkg.nested"] + assert find_parent_package(packages, {"": "src"}, tmp_path) == "namespace.pkg" + + def test_multiple_toplevel(self, tmp_path): + # find_parent_package should return null if the given list of packages does not + # have a single parent package + multiple = ["pkg", "pkg1", "pkg2"] + for name in multiple: + (tmp_path / f"src/{name}").mkdir(exist_ok=True, parents=True) + (tmp_path / f"src/{name}/__init__.py").touch() + assert 
find_parent_package(multiple, {"": "src"}, tmp_path) is None + + +class TestDiscoverPackagesAndPyModules: + """Make sure discovered values for ``packages`` and ``py_modules`` work + similarly to explicit configuration for the simple scenarios. + """ + OPTIONS = { + # Different options according to the circumstance being tested + "explicit-src": { + "package_dir": {"": "src"}, + "packages": ["pkg"] + }, + "variation-lib": { + "package_dir": {"": "lib"}, # variation of the source-layout + }, + "explicit-flat": { + "packages": ["pkg"] + }, + "explicit-single_module": { + "py_modules": ["pkg"] + }, + "explicit-namespace": { + "packages": ["ns", "ns.pkg"] + }, + "automatic-src": {}, + "automatic-flat": {}, + "automatic-single_module": {}, + "automatic-namespace": {} + } + FILES = { + "src": ["src/pkg/__init__.py", "src/pkg/main.py"], + "lib": ["lib/pkg/__init__.py", "lib/pkg/main.py"], + "flat": ["pkg/__init__.py", "pkg/main.py"], + "single_module": ["pkg.py"], + "namespace": ["ns/pkg/__init__.py"] + } + + def _get_info(self, circumstance): + _, _, layout = circumstance.partition("-") + files = self.FILES[layout] + options = self.OPTIONS[circumstance] + return files, options + + @pytest.mark.parametrize("circumstance", OPTIONS.keys()) + def test_sdist_filelist(self, tmp_path, circumstance): + files, options = self._get_info(circumstance) + _populate_project_dir(tmp_path, files, options) + + _, cmd = _run_sdist_programatically(tmp_path, options) + + manifest = [f.replace(os.sep, "/") for f in cmd.filelist.files] + for file in files: + assert any(f.endswith(file) for f in manifest) + + @pytest.mark.parametrize("circumstance", OPTIONS.keys()) + def test_project(self, tmp_path, circumstance): + files, options = self._get_info(circumstance) + _populate_project_dir(tmp_path, files, options) + + # Simulate a pre-existing `build` directory + (tmp_path / "build").mkdir() + (tmp_path / "build/lib").mkdir() + (tmp_path / "build/bdist.linux-x86_64").mkdir() + (tmp_path / "build/bdist.linux-x86_64/file.py").touch() + (tmp_path / "build/lib/__init__.py").touch() + (tmp_path / "build/lib/file.py").touch() + (tmp_path / "dist").mkdir() + (tmp_path / "dist/file.py").touch() + + _run_build(tmp_path) + + sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz"))) + print("~~~~~ sdist_members ~~~~~") + print('\n'.join(sdist_files)) + assert sdist_files >= set(files) + + wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl"))) + print("~~~~~ wheel_members ~~~~~") + print('\n'.join(wheel_files)) + orig_files = {f.replace("src/", "").replace("lib/", "") for f in files} + assert wheel_files >= orig_files + + # Make sure build files are not included by mistake + # (check path components so `.dist-info` entries are not false positives) + for file in wheel_files: + assert "build" not in file.split("/") + assert "dist" not in file.split("/") + + PURPOSEFULLY_EMPY = { + "setup.cfg": DALS( + """ + [metadata] + name = myproj + version = 0.0.0 + + [options] + {param} = + """ + ), + "setup.py": DALS( + """ + __import__('setuptools').setup( + name="myproj", + version="0.0.0", + {param}=[] + ) + """ + ), + "pyproject.toml": DALS( + """ + [build-system] + requires = [] + build-backend = 'setuptools.build_meta' + + [project] + name = "myproj" + version = "0.0.0" + + [tool.setuptools] + {param} = [] + """ + ), + "template-pyproject.toml": DALS( + """ + [build-system] + requires = [] + build-backend = 'setuptools.build_meta' + """ + ) + } + + @pytest.mark.parametrize( + "config_file, param, circumstance", + product( + ["setup.cfg", "setup.py", "pyproject.toml"], + ["packages", "py_modules"], + FILES.keys()
+ ) + ) + def test_purposefully_empty(self, tmp_path, config_file, param, circumstance): + files = self.FILES[circumstance] + ["mod.py", "other.py", "src/pkg/__init__.py"] + _populate_project_dir(tmp_path, files, {}) + + if config_file == "pyproject.toml": + template_param = param.replace("_", "-") + else: + # Make sure build works with or without setup.cfg + pyproject = self.PURPOSEFULLY_EMPY["template-pyproject.toml"] + (tmp_path / "pyproject.toml").write_text(pyproject) + template_param = param + + config = self.PURPOSEFULLY_EMPY[config_file].format(param=template_param) + (tmp_path / config_file).write_text(config) + + dist = _get_dist(tmp_path, {}) + # When either parameter package or py_modules is an empty list, + # then there should be no discovery + assert getattr(dist, param) == [] + other = {"py_modules": "packages", "packages": "py_modules"}[param] + assert getattr(dist, other) is None + + @pytest.mark.parametrize( + "extra_files, pkgs", + [ + (["venv/bin/simulate_venv"], {"pkg"}), + (["pkg-stubs/__init__.pyi"], {"pkg", "pkg-stubs"}), + (["other-stubs/__init__.pyi"], {"pkg", "other-stubs"}), + ( + # Type stubs can also be namespaced + ["namespace-stubs/pkg/__init__.pyi"], + {"pkg", "namespace-stubs", "namespace-stubs.pkg"}, + ), + ( + # Just the top-level package can have `-stubs`, ignore nested ones + ["namespace-stubs/pkg-stubs/__init__.pyi"], + {"pkg", "namespace-stubs"} + ), + (["_hidden/file.py"], {"pkg"}), + (["news/finalize.py"], {"pkg"}), + ] + ) + def test_flat_layout_with_extra_files(self, tmp_path, extra_files, pkgs): + files = self.FILES["flat"] + extra_files + _populate_project_dir(tmp_path, files, {}) + dist = _get_dist(tmp_path, {}) + assert set(dist.packages) == pkgs + + @pytest.mark.parametrize( + "extra_files", + [ + ["other/__init__.py"], + ["other/finalize.py"], + ] + ) + def test_flat_layout_with_dangerous_extra_files(self, tmp_path, extra_files): + files = self.FILES["flat"] + extra_files + _populate_project_dir(tmp_path, files, {}) + with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"): + _get_dist(tmp_path, {}) + + def test_flat_layout_with_single_module(self, tmp_path): + files = self.FILES["single_module"] + ["invalid-module-name.py"] + _populate_project_dir(tmp_path, files, {}) + dist = _get_dist(tmp_path, {}) + assert set(dist.py_modules) == {"pkg"} + + def test_flat_layout_with_multiple_modules(self, tmp_path): + files = self.FILES["single_module"] + ["valid_module_name.py"] + _populate_project_dir(tmp_path, files, {}) + with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"): + _get_dist(tmp_path, {}) + + +class TestNoConfig: + DEFAULT_VERSION = "0.0.0" # Default version given by setuptools + + EXAMPLES = { + "pkg1": ["src/pkg1.py"], + "pkg2": ["src/pkg2/__init__.py"], + "pkg3": ["src/pkg3/__init__.py", "src/pkg3-stubs/__init__.py"], + "pkg4": ["pkg4/__init__.py", "pkg4-stubs/__init__.py"], + "ns.nested.pkg1": ["src/ns/nested/pkg1/__init__.py"], + "ns.nested.pkg2": ["ns/nested/pkg2/__init__.py"], + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_discover_name(self, tmp_path, example): + _populate_project_dir(tmp_path, self.EXAMPLES[example], {}) + dist = _get_dist(tmp_path, {}) + assert dist.get_name() == example + + def test_build_with_discovered_name(self, tmp_path): + files = ["src/ns/nested/pkg/__init__.py"] + _populate_project_dir(tmp_path, files, {}) + _run_build(tmp_path, "--sdist") + # Expected distribution file + dist_file = tmp_path / 
f"dist/ns.nested.pkg-{self.DEFAULT_VERSION}.tar.gz" + assert dist_file.is_file() + + +class TestWithAttrDirective: + @pytest.mark.parametrize( + "folder, opts", + [ + ("src", {}), + ("lib", {"packages": "find:", "packages.find": {"where": "lib"}}), + ] + ) + def test_setupcfg_metadata(self, tmp_path, folder, opts): + files = [f"{folder}/pkg/__init__.py", "setup.cfg"] + _populate_project_dir(tmp_path, files, opts) + (tmp_path / folder / "pkg/__init__.py").write_text("version = 42") + (tmp_path / "setup.cfg").write_text( + "[metadata]\nversion = attr: pkg.version\n" + + (tmp_path / "setup.cfg").read_text() + ) + + dist = _get_dist(tmp_path, {}) + assert dist.get_name() == "pkg" + assert dist.get_version() == "42" + assert dist.package_dir + package_path = find_package_path("pkg", dist.package_dir, tmp_path) + assert os.path.exists(package_path) + assert folder in _Path(package_path).parts() + + _run_build(tmp_path, "--sdist") + dist_file = tmp_path / "dist/pkg-42.tar.gz" + assert dist_file.is_file() + + def test_pyproject_metadata(self, tmp_path): + _populate_project_dir(tmp_path, ["src/pkg/__init__.py"], {}) + (tmp_path / "src/pkg/__init__.py").write_text("version = 42") + (tmp_path / "pyproject.toml").write_text( + "[project]\nname = 'pkg'\ndynamic = ['version']\n" + "[tool.setuptools.dynamic]\nversion = {attr = 'pkg.version'}\n" + ) + dist = _get_dist(tmp_path, {}) + assert dist.get_version() == "42" + assert dist.package_dir == {"": "src"} + + +class TestWithCExtension: + def _simulate_package_with_extension(self, tmp_path): + # This example is based on: https://github.com/nucleic/kiwi/tree/1.4.0 + files = [ + "benchmarks/file.py", + "docs/Makefile", + "docs/requirements.txt", + "docs/source/conf.py", + "proj/header.h", + "proj/file.py", + "py/proj.cpp", + "py/other.cpp", + "py/file.py", + "py/py.typed", + "py/tests/test_proj.py", + "README.rst", + ] + _populate_project_dir(tmp_path, files, {}) + + setup_script = """ + from setuptools import Extension, setup + + ext_modules = [ + Extension( + "proj", + ["py/proj.cpp", "py/other.cpp"], + include_dirs=["."], + language="c++", + ), + ] + setup(ext_modules=ext_modules) + """ + (tmp_path / "setup.py").write_text(DALS(setup_script)) + + def test_skip_discovery_with_setupcfg_metadata(self, tmp_path): + """Ensure that auto-discovery is not triggered when the project is based on + C-extensions only, for backward compatibility. + """ + self._simulate_package_with_extension(tmp_path) + + pyproject = """ + [build-system] + requires = [] + build-backend = 'setuptools.build_meta' + """ + (tmp_path / "pyproject.toml").write_text(DALS(pyproject)) + + setupcfg = """ + [metadata] + name = proj + version = 42 + """ + (tmp_path / "setup.cfg").write_text(DALS(setupcfg)) + + dist = _get_dist(tmp_path, {}) + assert dist.get_name() == "proj" + assert dist.get_version() == "42" + assert dist.py_modules is None + assert dist.packages is None + assert len(dist.ext_modules) == 1 + assert dist.ext_modules[0].name == "proj" + + def test_dont_skip_discovery_with_pyproject_metadata(self, tmp_path): + """When opting-in to pyproject.toml metadata, auto-discovery will be active if + the package lists C-extensions, but does not configure py-modules or packages. + + This way we ensure users with complex package layouts that would lead to the + discovery of multiple top-level modules/packages see errors and are forced to + explicitly set ``packages`` or ``py-modules``. 
+ """ + self._simulate_package_with_extension(tmp_path) + + pyproject = """ + [project] + name = 'proj' + version = '42' + """ + (tmp_path / "pyproject.toml").write_text(DALS(pyproject)) + with pytest.raises(PackageDiscoveryError, match="multiple (packages|modules)"): + _get_dist(tmp_path, {}) + + +class TestWithPackageData: + def _simulate_package_with_data_files(self, tmp_path, src_root): + files = [ + f"{src_root}/proj/__init__.py", + f"{src_root}/proj/file1.txt", + f"{src_root}/proj/nested/file2.txt", + ] + _populate_project_dir(tmp_path, files, {}) + + manifest = """ + global-include *.py *.txt + """ + (tmp_path / "MANIFEST.in").write_text(DALS(manifest)) + + EXAMPLE_SETUPCFG = """ + [metadata] + name = proj + version = 42 + + [options] + include_package_data = True + """ + EXAMPLE_PYPROJECT = """ + [project] + name = "proj" + version = "42" + """ + + PYPROJECT_PACKAGE_DIR = """ + [tool.setuptools] + package-dir = {"" = "src"} + """ + + @pytest.mark.parametrize( + "src_root, files", + [ + (".", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}), + (".", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}), + ("src", {"setup.cfg": DALS(EXAMPLE_SETUPCFG)}), + ("src", {"pyproject.toml": DALS(EXAMPLE_PYPROJECT)}), + ( + "src", + { + "setup.cfg": DALS(EXAMPLE_SETUPCFG) + DALS( + """ + packages = find: + package_dir = + =src + + [options.packages.find] + where = src + """ + ) + } + ), + ( + "src", + { + "pyproject.toml": DALS(EXAMPLE_PYPROJECT) + DALS( + """ + [tool.setuptools] + package-dir = {"" = "src"} + """ + ) + }, + ), + ] + ) + def test_include_package_data(self, tmp_path, src_root, files): + """ + Make sure auto-discovery does not affect package include_package_data. + See issue #3196. + """ + jaraco.path.build(files, prefix=str(tmp_path)) + self._simulate_package_with_data_files(tmp_path, src_root) + + expected = { + os.path.normpath(f"{src_root}/proj/file1.txt").replace(os.sep, "/"), + os.path.normpath(f"{src_root}/proj/nested/file2.txt").replace(os.sep, "/"), + } + + _run_build(tmp_path) + + sdist_files = get_sdist_members(next(tmp_path.glob("dist/*.tar.gz"))) + print("~~~~~ sdist_members ~~~~~") + print('\n'.join(sdist_files)) + assert sdist_files >= expected + + wheel_files = get_wheel_members(next(tmp_path.glob("dist/*.whl"))) + print("~~~~~ wheel_members ~~~~~") + print('\n'.join(wheel_files)) + orig_files = {f.replace("src/", "").replace("lib/", "") for f in expected} + assert wheel_files >= orig_files + + +def test_compatible_with_numpy_configuration(tmp_path): + files = [ + "dir1/__init__.py", + "dir2/__init__.py", + "file.py", + ] + _populate_project_dir(tmp_path, files, {}) + dist = Distribution({}) + dist.configuration = object() + dist.set_defaults() + assert dist.py_modules is None + assert dist.packages is None + + +def _populate_project_dir(root, files, options): + # NOTE: Currently pypa/build will refuse to build the project if no + # `pyproject.toml` or `setup.py` is found. So it is impossible to do + # completely "config-less" projects. 
+ (root / "setup.py").write_text("import setuptools\nsetuptools.setup()") + (root / "README.md").write_text("# Example Package") + (root / "LICENSE").write_text("Copyright (c) 2018") + _write_setupcfg(root, options) + paths = (root / f for f in files) + for path in paths: + path.parent.mkdir(exist_ok=True, parents=True) + path.touch() + + +def _write_setupcfg(root, options): + if not options: + print("~~~~~ **NO** setup.cfg ~~~~~") + return + setupcfg = ConfigParser() + setupcfg.add_section("options") + for key, value in options.items(): + if key == "packages.find": + setupcfg.add_section(f"options.{key}") + setupcfg[f"options.{key}"].update(value) + elif isinstance(value, list): + setupcfg["options"][key] = ", ".join(value) + elif isinstance(value, dict): + str_value = "\n".join(f"\t{k} = {v}" for k, v in value.items()) + setupcfg["options"][key] = "\n" + str_value + else: + setupcfg["options"][key] = str(value) + with open(root / "setup.cfg", "w") as f: + setupcfg.write(f) + print("~~~~~ setup.cfg ~~~~~") + print((root / "setup.cfg").read_text()) + + +def _run_build(path, *flags): + cmd = [sys.executable, "-m", "build", "--no-isolation", *flags, str(path)] + return run(cmd, env={'DISTUTILS_DEBUG': ''}) + + +def _get_dist(dist_path, attrs): + root = "/".join(os.path.split(dist_path)) # POSIX-style + + script = dist_path / 'setup.py' + if script.exists(): + with _Path(dist_path): + dist = distutils.core.run_setup("setup.py", {}, stop_after="init") + else: + dist = Distribution(attrs) + + dist.src_root = root + dist.script_name = "setup.py" + with _Path(dist_path): + dist.parse_config_files() + + dist.set_defaults() + return dist + + +def _run_sdist_programatically(dist_path, attrs): + dist = _get_dist(dist_path, attrs) + cmd = sdist(dist) + cmd.ensure_finalized() + assert cmd.distribution.packages or cmd.distribution.py_modules + + with quiet(), _Path(dist_path): + cmd.run() + + return dist, cmd diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py index 70c5794c..c52072ac 100644 --- a/setuptools/tests/test_develop.py +++ b/setuptools/tests/test_develop.py @@ -6,11 +6,11 @@ import sys import subprocess import platform import pathlib -import textwrap from setuptools.command import test import pytest +import pip_run.launch from setuptools.command.develop import develop from setuptools.dist import Distribution @@ -166,21 +166,6 @@ class TestNamespaces: with test.test.paths_on_pythonpath([str(target)]): subprocess.check_call(pkg_resources_imp) - @staticmethod - def install_workaround(site_packages): - site_packages.mkdir(parents=True) - sc = site_packages / 'sitecustomize.py' - sc.write_text( - textwrap.dedent( - """ - import site - import pathlib - here = pathlib.Path(__file__).parent - site.addsitedir(str(here)) - """ - ).lstrip() - ) - @pytest.mark.xfail( platform.python_implementation() == 'PyPy', reason="Workaround fails on PyPy (why?)", @@ -190,7 +175,6 @@ class TestNamespaces: Editable install to a prefix should be discoverable. 
""" prefix = tmp_path / 'prefix' - prefix.mkdir() # figure out where pip will likely install the package site_packages = prefix / next( @@ -198,9 +182,10 @@ class TestNamespaces: for path in sys.path if 'site-packages' in path and path.startswith(sys.prefix) ) + site_packages.mkdir(parents=True) - # install the workaround - self.install_workaround(site_packages) + # install workaround + pip_run.launch.inject_sitecustomize(str(site_packages)) env = dict(os.environ, PYTHONPATH=str(site_packages)) cmd = [ @@ -219,6 +204,6 @@ class TestNamespaces: # now run 'sample' with the prefix on the PYTHONPATH bin = 'Scripts' if platform.system() == 'Windows' else 'bin' exe = prefix / bin / 'sample' - if sys.version_info < (3, 7) and platform.system() == 'Windows': + if sys.version_info < (3, 8) and platform.system() == 'Windows': exe = str(exe) subprocess.check_call([exe], env=env) diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py index c4279f0b..e7d2f5ca 100644 --- a/setuptools/tests/test_dist.py +++ b/setuptools/tests/test_dist.py @@ -2,6 +2,7 @@ import io import collections import re import functools +import os import urllib.request import urllib.parse from distutils.errors import DistutilsSetupError @@ -18,6 +19,7 @@ from setuptools import Distribution from .textwrap import DALS from .test_easy_install import make_nspkg_sdist +from .test_find_packages import ensure_files import pytest @@ -69,16 +71,19 @@ def test_dist__get_unpatched_deprecated(): pytest.warns(DistDeprecationWarning, _get_unpatched, [""]) +EXAMPLE_BASE_INFO = dict( + name="package", + version="0.0.1", + author="Foo Bar", + author_email="foo@bar.net", + long_description="Long\ndescription", + description="Short description", + keywords=["one", "two"], +) + + def __read_test_cases(): - base = dict( - name="package", - version="0.0.1", - author="Foo Bar", - author_email="foo@bar.net", - long_description="Long\ndescription", - description="Short description", - keywords=["one", "two"], - ) + base = EXAMPLE_BASE_INFO params = functools.partial(dict, base) @@ -374,3 +379,131 @@ def test_check_specifier(): ) def test_rfc822_unescape(content, result): assert (result or content) == rfc822_unescape(rfc822_escape(content)) + + +def test_metadata_name(): + with pytest.raises(DistutilsSetupError, match='missing.*name'): + Distribution()._validate_metadata() + + +@pytest.mark.parametrize( + "dist_name, py_module", + [ + ("my.pkg", "my_pkg"), + ("my-pkg", "my_pkg"), + ("my_pkg", "my_pkg"), + ("pkg", "pkg"), + ] +) +def test_dist_default_py_modules(tmp_path, dist_name, py_module): + (tmp_path / f"{py_module}.py").touch() + + (tmp_path / "setup.py").touch() + (tmp_path / "noxfile.py").touch() + # ^-- make sure common tool files are ignored + + attrs = { + **EXAMPLE_BASE_INFO, + "name": dist_name, + "src_root": str(tmp_path) + } + # Find `py_modules` corresponding to dist_name if not given + dist = Distribution(attrs) + dist.set_defaults() + assert dist.py_modules == [py_module] + # When `py_modules` is given, don't do anything + dist = Distribution({**attrs, "py_modules": ["explicity_py_module"]}) + dist.set_defaults() + assert dist.py_modules == ["explicity_py_module"] + # When `packages` is given, don't do anything + dist = Distribution({**attrs, "packages": ["explicity_package"]}) + dist.set_defaults() + assert not dist.py_modules + + +@pytest.mark.parametrize( + "dist_name, package_dir, package_files, packages", + [ + ("my.pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]), + ("my-pkg", None, 
["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]), + ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/mod.py"], ["my_pkg"]), + ("my.pkg", None, ["my/pkg/__init__.py"], ["my", "my.pkg"]), + ( + "my_pkg", + None, + ["src/my_pkg/__init__.py", "src/my_pkg2/__init__.py"], + ["my_pkg", "my_pkg2"] + ), + ( + "my_pkg", + {"pkg": "lib", "pkg2": "lib2"}, + ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"], + ["pkg", "pkg.nested", "pkg2"] + ), + ] +) +def test_dist_default_packages( + tmp_path, dist_name, package_dir, package_files, packages +): + ensure_files(tmp_path, package_files) + + (tmp_path / "setup.py").touch() + (tmp_path / "noxfile.py").touch() + # ^-- should not be included by default + + attrs = { + **EXAMPLE_BASE_INFO, + "name": dist_name, + "src_root": str(tmp_path), + "package_dir": package_dir + } + # Find `packages` either corresponding to dist_name or inside src + dist = Distribution(attrs) + dist.set_defaults() + assert not dist.py_modules + assert not dist.py_modules + assert set(dist.packages) == set(packages) + # When `py_modules` is given, don't do anything + dist = Distribution({**attrs, "py_modules": ["explicit_py_module"]}) + dist.set_defaults() + assert not dist.packages + assert set(dist.py_modules) == {"explicit_py_module"} + # When `packages` is given, don't do anything + dist = Distribution({**attrs, "packages": ["explicit_package"]}) + dist.set_defaults() + assert not dist.py_modules + assert set(dist.packages) == {"explicit_package"} + + +@pytest.mark.parametrize( + "dist_name, package_dir, package_files", + [ + ("my.pkg.nested", None, ["my/pkg/nested/__init__.py"]), + ("my.pkg", None, ["my/pkg/__init__.py", "my/pkg/file.py"]), + ("my_pkg", None, ["my_pkg.py"]), + ("my_pkg", None, ["my_pkg/__init__.py", "my_pkg/nested/__init__.py"]), + ("my_pkg", None, ["src/my_pkg/__init__.py", "src/my_pkg/nested/__init__.py"]), + ( + "my_pkg", + {"my_pkg": "lib", "my_pkg.lib2": "lib2"}, + ["lib/__init__.py", "lib/nested/__init__.pyt", "lib2/__init__.py"], + ), + # Should not try to guess a name from multiple py_modules/packages + ("UNKNOWN", None, ["src/mod1.py", "src/mod2.py"]), + ("UNKNOWN", None, ["src/pkg1/__ini__.py", "src/pkg2/__init__.py"]), + ] +) +def test_dist_default_name(tmp_path, dist_name, package_dir, package_files): + """Make sure dist.name is discovered from packages/py_modules""" + ensure_files(tmp_path, package_files) + attrs = { + **EXAMPLE_BASE_INFO, + "src_root": "/".join(os.path.split(tmp_path)), # POSIX-style + "package_dir": package_dir + } + del attrs["name"] + + dist = Distribution(attrs) + dist.set_defaults() + assert dist.py_modules or dist.packages + assert dist.get_name() == dist_name diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py index 29fbd09d..813ef51d 100644 --- a/setuptools/tests/test_dist_info.py +++ b/setuptools/tests/test_dist_info.py @@ -1,12 +1,21 @@ """Test .dist-info style distributions. 
""" +import pathlib +import re +import subprocess +import sys +from functools import partial import pytest import pkg_resources +from setuptools.archive_util import unpack_archive from .textwrap import DALS +read = partial(pathlib.Path.read_text, encoding="utf-8") + + class TestDistInfo: metadata_base = DALS(""" @@ -72,3 +81,78 @@ class TestDistInfo: pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"'), ] assert d.extras == ['baz'] + + def test_invalid_version(self, tmp_path): + config = "[metadata]\nname=proj\nversion=42\n[egg_info]\ntag_build=invalid!!!\n" + (tmp_path / "setup.cfg").write_text(config, encoding="utf-8") + msg = re.compile("invalid version", re.M | re.I) + output = run_command("dist_info", cwd=tmp_path) + assert msg.search(output) + dist_info = next(tmp_path.glob("*.dist-info")) + assert dist_info.name.startswith("proj-42") + + +class TestWheelCompatibility: + """Make sure the .dist-info directory produced with the ``dist_info`` command + is the same as the one produced by ``bdist_wheel``. + """ + SETUPCFG = DALS(""" + [metadata] + name = {name} + version = {version} + + [options] + install_requires = foo>=12; sys_platform != "linux" + + [options.extras_require] + test = pytest + + [options.entry_points] + console_scripts = + executable-name = my_package.module:function + discover = + myproj = my_package.other_module:function + """) + + EGG_INFO_OPTS = [ + # Related: #3088 #2872 + ("", ""), + (".post", "[egg_info]\ntag_build = post\n"), + (".post", "[egg_info]\ntag_build = .post\n"), + (".post", "[egg_info]\ntag_build = post\ntag_date = 1\n"), + (".dev", "[egg_info]\ntag_build = .dev\n"), + (".dev", "[egg_info]\ntag_build = .dev\ntag_date = 1\n"), + ("a1", "[egg_info]\ntag_build = .a1\n"), + ("+local", "[egg_info]\ntag_build = +local\n"), + ] + + @pytest.mark.parametrize("name", "my-proj my_proj my.proj My.Proj".split()) + @pytest.mark.parametrize("version", ["0.42.13"]) + @pytest.mark.parametrize("suffix, cfg", EGG_INFO_OPTS) + def test_dist_info_is_the_same_as_in_wheel( + self, name, version, tmp_path, suffix, cfg + ): + config = self.SETUPCFG.format(name=name, version=version) + cfg + + for i in "dir_wheel", "dir_dist": + (tmp_path / i).mkdir() + (tmp_path / i / "setup.cfg").write_text(config, encoding="utf-8") + + run_command("bdist_wheel", cwd=tmp_path / "dir_wheel") + wheel = next(tmp_path.glob("dir_wheel/dist/*.whl")) + unpack_archive(wheel, tmp_path / "unpack") + wheel_dist_info = next(tmp_path.glob("unpack/*.dist-info")) + + run_command("dist_info", cwd=tmp_path / "dir_dist") + dist_info = next(tmp_path.glob("dir_dist/*.dist-info")) + + assert dist_info.name == wheel_dist_info.name + assert dist_info.name.startswith(f"{name.replace('-', '_')}-{version}{suffix}") + for file in "METADATA", "entry_points.txt": + assert read(dist_info / file) == read(wheel_dist_info / file) + + +def run_command(*cmd, **kwargs): + opts = {"stderr": subprocess.STDOUT, "text": True, **kwargs} + cmd = [sys.executable, "-c", "__import__('setuptools').setup()", *cmd] + return subprocess.check_output(cmd, **opts) diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py index b6b9c00e..df8f3541 100644 --- a/setuptools/tests/test_distutils_adoption.py +++ b/setuptools/tests/test_distutils_adoption.py @@ -1,38 +1,15 @@ import os import sys import functools -import subprocess import platform +import textwrap import pytest -import jaraco.envs -import path IS_PYPY = '__pypy__' in sys.builtin_module_names -class VirtualEnv(jaraco.envs.VirtualEnv): 
- name = '.env' - # Some version of PyPy will import distutils on startup, implicitly - # importing setuptools, and thus leading to BackendInvalid errors - # when upgrading Setuptools. Bypass this behavior by avoiding the - # early availability and need to upgrade. - create_opts = ['--no-setuptools'] - - def run(self, cmd, *args, **kwargs): - cmd = [self.exe(cmd[0])] + cmd[1:] - return subprocess.check_output(cmd, *args, cwd=self.root, **kwargs) - - -@pytest.fixture -def venv(tmp_path, tmp_src): - env = VirtualEnv() - env.root = path.Path(tmp_path / 'venv') - env.req = str(tmp_src) - return env.create() - - def popen_text(call): """ Augment the Popen call with the parameters to ensure unicode text. @@ -41,12 +18,35 @@ def popen_text(call): if sys.version_info < (3, 7) else functools.partial(call, text=True) +def win_sr(env): + """ + On Windows, SYSTEMROOT must be present to avoid + + > Fatal Python error: _Py_HashRandomization_Init: failed to + > get random numbers to initialize Python + """ + if env is None: + return + if platform.system() == 'Windows': + env['SYSTEMROOT'] = os.environ['SYSTEMROOT'] + return env + + def find_distutils(venv, imports='distutils', env=None, **kwargs): py_cmd = 'import {imports}; print(distutils.__file__)'.format(**locals()) cmd = ['python', '-c', py_cmd] - if platform.system() == 'Windows': - env['SYSTEMROOT'] = os.environ['SYSTEMROOT'] - return popen_text(venv.run)(cmd, env=env, **kwargs) + return popen_text(venv.run)(cmd, env=win_sr(env), **kwargs) + + +def count_meta_path(venv, env=None): + py_cmd = textwrap.dedent( + """ + import sys + is_distutils = lambda finder: finder.__class__.__name__ == "DistutilsMetaFinder" + print(len(list(filter(is_distutils, sys.meta_path)))) + """) + cmd = ['python', '-c', py_cmd] + return int(popen_text(venv.run)(cmd, env=win_sr(env))) def test_distutils_stdlib(venv): @@ -55,6 +55,7 @@ def test_distutils_stdlib(venv): """ env = dict(SETUPTOOLS_USE_DISTUTILS='stdlib') assert venv.name not in find_distutils(venv, env=env).split(os.sep) + assert count_meta_path(venv, env=env) == 0 def test_distutils_local_with_setuptools(venv): @@ -64,6 +65,7 @@ def test_distutils_local_with_setuptools(venv): env = dict(SETUPTOOLS_USE_DISTUTILS='local') loc = find_distutils(venv, imports='setuptools, distutils', env=env) assert venv.name in loc.split(os.sep) + assert count_meta_path(venv, env=env) <= 1 @pytest.mark.xfail('IS_PYPY', reason='pypy imports distutils on startup') @@ -74,3 +76,83 @@ def test_distutils_local(venv): """ env = dict(SETUPTOOLS_USE_DISTUTILS='local') assert venv.name in find_distutils(venv, env=env).split(os.sep) + assert count_meta_path(venv, env=env) <= 1 + + +def test_pip_import(venv): + """ + Ensure pip can be imported. + Regression test for #3002. + """ + cmd = ['python', '-c', 'import pip'] + popen_text(venv.run)(cmd) + + +def test_distutils_has_origin(): + """ + Distutils module spec should have an origin. #2990. + """ + assert __import__('distutils').__spec__.origin + + +ENSURE_IMPORTS_ARE_NOT_DUPLICATED = r""" +# Depending on the importlib machinery and _distutils_hack, some imports are +# duplicated resulting in different module objects being loaded, which prevents +# patches as shown in #3042. +# This script provides a way of verifying if this duplication is happening. 
+ +from distutils import cmd +import distutils.command.sdist as sdist + +# import last to prevent caching +from distutils import {imported_module} + +for mod in (cmd, sdist): + assert mod.{imported_module} == {imported_module}, ( + f"\n{{mod.dir_util}}\n!=\n{{{imported_module}}}" + ) + +print("success") +""" + + +@pytest.mark.parametrize( + "distutils_version, imported_module", + [ + ("stdlib", "dir_util"), + ("stdlib", "file_util"), + ("stdlib", "archive_util"), + ("local", "dir_util"), + ("local", "file_util"), + ("local", "archive_util"), + ] +) +def test_modules_are_not_duplicated_on_import( + distutils_version, imported_module, tmpdir_cwd, venv +): + env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version) + script = ENSURE_IMPORTS_ARE_NOT_DUPLICATED.format(imported_module=imported_module) + cmd = ['python', '-c', script] + output = popen_text(venv.run)(cmd, env=win_sr(env)).strip() + assert output == "success" + + +ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED = r""" +# Similar to ENSURE_IMPORTS_ARE_NOT_DUPLICATED +import distutils.dist as dist +from distutils import log + +assert dist.log == log, ( + f"\n{dist.log}\n!=\n{log}" +) + +print("success") +""" + + +@pytest.mark.parametrize("distutils_version", "local stdlib".split()) +def test_log_module_is_not_duplicated_on_import(distutils_version, tmpdir_cwd, venv): + env = dict(SETUPTOOLS_USE_DISTUTILS=distutils_version) + cmd = ['python', '-c', ENSURE_LOG_IMPORT_IS_NOT_DUPLICATED] + output = popen_text(venv.run)(cmd, env=win_sr(env)).strip() + assert output == "success" diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index c7026852..64f2d631 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -14,6 +14,10 @@ import zipfile import mock import time import re +import subprocess +import pathlib +import warnings +import collections import pytest @@ -55,7 +59,7 @@ class FakeDist: SETUP_PY = DALS(""" from setuptools import setup - setup(name='foo') + setup() """) @@ -365,6 +369,63 @@ def mock_index(): return p_index +class TestInstallRequires: + def test_setup_install_includes_dependencies(self, tmp_path, mock_index): + """ + When ``python setup.py install`` is called directly, it will use easy_install + to fetch dependencies. 
+ """ + # TODO: Remove these tests once `setup.py install` is completely removed + project_root = tmp_path / "project" + project_root.mkdir(exist_ok=True) + install_root = tmp_path / "install" + install_root.mkdir(exist_ok=True) + + self.create_project(project_root) + cmd = [ + sys.executable, + '-c', '__import__("setuptools").setup()', + 'install', + '--install-base', str(install_root), + '--install-lib', str(install_root), + '--install-headers', str(install_root), + '--install-scripts', str(install_root), + '--install-data', str(install_root), + '--install-purelib', str(install_root), + '--install-platlib', str(install_root), + ] + env = {"PYTHONPATH": str(install_root), "__EASYINSTALL_INDEX": mock_index.url} + with pytest.raises(subprocess.CalledProcessError) as exc_info: + subprocess.check_output( + cmd, cwd=str(project_root), env=env, stderr=subprocess.STDOUT, text=True + ) + try: + assert '/does-not-exist/' in {r.path for r in mock_index.requests} + assert next( + line + for line in exc_info.value.output.splitlines() + if "not find suitable distribution for" in line + and "does-not-exist" in line + ) + except Exception: + if "failed to get random numbers" in exc_info.value.output: + pytest.xfail(f"{sys.platform} failure - {exc_info.value.output}") + raise + + def create_project(self, root): + config = """ + [metadata] + name = project + version = 42 + + [options] + install_requires = does-not-exist + py_modules = mod + """ + (root / 'setup.cfg').write_text(DALS(config), encoding="utf-8") + (root / 'mod.py').touch() + + def make_trivial_sdist(dist_path, distname, version): """ Create a simple sdist tarball at dist_path, containing just a simple @@ -597,3 +658,102 @@ class TestWindowsScriptWriter: hdr = hdr.rstrip('\n') # header should not start with an escaped quote assert not hdr.startswith('\\"') + + +VersionStub = collections.namedtuple( + "VersionStub", "major, minor, micro, releaselevel, serial") + + +def test_use_correct_python_version_string(tmpdir, tmpdir_cwd, monkeypatch): + # In issue #3001, easy_install wrongly uses the `python3.1` directory + # when the interpreter is `python3.10` and the `--user` option is given. + # See pypa/setuptools#3001. + dist = Distribution() + cmd = dist.get_command_obj('easy_install') + cmd.args = ['ok'] + cmd.optimize = 0 + cmd.user = True + cmd.install_userbase = str(tmpdir) + cmd.install_usersite = None + install_cmd = dist.get_command_obj('install') + install_cmd.install_userbase = str(tmpdir) + install_cmd.install_usersite = None + + with monkeypatch.context() as patch, warnings.catch_warnings(): + warnings.simplefilter("ignore") + version = '3.10.1 (main, Dec 21 2021, 09:17:12) [GCC 10.2.1 20210110]' + info = VersionStub(3, 10, 1, "final", 0) + patch.setattr('site.ENABLE_USER_SITE', True) + patch.setattr('sys.version', version) + patch.setattr('sys.version_info', info) + patch.setattr(cmd, 'create_home_path', mock.Mock()) + cmd.finalize_options() + + name = "pypy" if hasattr(sys, 'pypy_version_info') else "python" + install_dir = cmd.install_dir.lower() + + # In some platforms (e.g. Windows), install_dir is mostly determined + # via `sysconfig`, which define constants eagerly at module creation. + # This means that monkeypatching `sys.version` to emulate 3.10 for testing + # may have no effect. + # The safest test here is to rely on the fact that 3.1 is no longer + # supported/tested, and make sure that if 'python3.1' ever appears in the string + # it is followed by another digit (e.g. 'python3.10'). 
+ if re.search(name + r'3\.?1', install_dir): + assert re.search(name + r'3\.?1\d', install_dir) + + # The following "variables" are used for interpolation in distutils + # installation schemes, so it should be fair to treat them as "semi-public", + # or at least public enough so we can have a test to make sure they are correct + assert cmd.config_vars['py_version'] == '3.10.1' + assert cmd.config_vars['py_version_short'] == '3.10' + assert cmd.config_vars['py_version_nodot'] == '310' + + +def test_editable_user_and_build_isolation(setup_context, monkeypatch, tmp_path): + ''' `setup.py develop` should honor `--user` even under build isolation''' + + # == Arrange == + # Pretend that build isolation was enabled + # e.g pip sets the environment varible PYTHONNOUSERSITE=1 + monkeypatch.setattr('site.ENABLE_USER_SITE', False) + + # Patching $HOME for 2 reasons: + # 1. setuptools/command/easy_install.py:create_home_path + # tries creating directories in $HOME + # given `self.config_vars['DESTDIRS'] = "/home/user/.pyenv/versions/3.9.10 /home/user/.pyenv/versions/3.9.10/lib /home/user/.pyenv/versions/3.9.10/lib/python3.9 /home/user/.pyenv/versions/3.9.10/lib/python3.9/lib-dynload"`` # noqa: E501 + # it will `makedirs("/home/user/.pyenv/versions/3.9.10 /home/user/.pyenv/versions/3.9.10/lib /home/user/.pyenv/versions/3.9.10/lib/python3.9 /home/user/.pyenv/versions/3.9.10/lib/python3.9/lib-dynload")`` # noqa: E501 + # 2. We are going to force `site` to update site.USER_BASE and site.USER_SITE + # To point inside our new home + monkeypatch.setenv('HOME', str(tmp_path / '.home')) + monkeypatch.setenv('USERPROFILE', str(tmp_path / '.home')) + monkeypatch.setenv('APPDATA', str(tmp_path / '.home')) + monkeypatch.setattr('site.USER_BASE', None) + monkeypatch.setattr('site.USER_SITE', None) + user_site = pathlib.Path(site.getusersitepackages()) + user_site.mkdir(parents=True, exist_ok=True) + + sys_prefix = (tmp_path / '.sys_prefix') + sys_prefix.mkdir(parents=True, exist_ok=True) + monkeypatch.setattr('sys.prefix', str(sys_prefix)) + + setup_script = ( + "__import__('setuptools').setup(name='aproj', version=42, packages=[])\n" + ) + (tmp_path / "setup.py").write_text(setup_script, encoding="utf-8") + + # == Sanity check == + assert list(sys_prefix.glob("*")) == [] + assert list(user_site.glob("*")) == [] + + # == Act == + run_setup('setup.py', ['develop', '--user']) + + # == Assert == + # Should not install to sys.prefix + assert list(sys_prefix.glob("*")) == [] + # Should install to user site + installed = {f.name for f in user_site.glob("*")} + # sometimes easy-install.pth is created and sometimes not + installed = installed - {"easy-install.pth"} + assert installed == {'aproj.egg-link'} diff --git a/setuptools/tests/test_editable_install.py b/setuptools/tests/test_editable_install.py new file mode 100644 index 00000000..aac4f5ee --- /dev/null +++ b/setuptools/tests/test_editable_install.py @@ -0,0 +1,113 @@ +import subprocess +from textwrap import dedent + +import pytest +import jaraco.envs +import path + + +@pytest.fixture +def venv(tmp_path, setuptools_wheel): + env = jaraco.envs.VirtualEnv() + vars(env).update( + root=path.Path(tmp_path), # workaround for error on windows + name=".venv", + create_opts=["--no-setuptools"], + req=str(setuptools_wheel), + ) + return env.create() + + +EXAMPLE = { + 'pyproject.toml': dedent("""\ + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + + [project] + name = "mypkg" + version = "3.14159" + license = {text = "MIT"} + 
description = "This is a Python package" + dynamic = ["readme"] + classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers" + ] + urls = {Homepage = "http://github.com"} + dependencies = ['importlib-metadata; python_version<"3.8"'] + + [tool.setuptools] + package-dir = {"" = "src"} + packages = {find = {where = ["src"]}} + license-files = ["LICENSE*"] + + [tool.setuptools.dynamic] + readme = {file = "README.rst"} + + [tool.distutils.egg_info] + tag-build = ".post0" + """), + "MANIFEST.in": dedent("""\ + global-include *.py *.txt + global-exclude *.py[cod] + """).strip(), + "README.rst": "This is a ``README``", + "LICENSE.txt": "---- placeholder MIT license ----", + "src": { + "mypkg": { + "__init__.py": dedent("""\ + import sys + + if sys.version_info[:2] >= (3, 8): + from importlib.metadata import PackageNotFoundError, version + else: + from importlib_metadata import PackageNotFoundError, version + + try: + __version__ = version(__name__) + except PackageNotFoundError: + __version__ = "unknown" + """), + "__main__.py": dedent("""\ + from importlib.resources import read_text + from . import __version__, __name__ as parent + from .mod import x + + data = read_text(parent, "data.txt") + print(__version__, data, x) + """), + "mod.py": "x = ''", + "data.txt": "Hello World", + } + } +} + + +SETUP_SCRIPT_STUB = "__import__('setuptools').setup()" +MISSING_SETUP_SCRIPT = pytest.param( + None, + marks=pytest.mark.xfail( + reason="Editable install is currently only supported with `setup.py`" + ) +) + + +@pytest.mark.parametrize("setup_script", [SETUP_SCRIPT_STUB, MISSING_SETUP_SCRIPT]) +def test_editable_with_pyproject(tmp_path, venv, setup_script): + project = tmp_path / "mypkg" + files = {**EXAMPLE, "setup.py": setup_script} + project.mkdir() + jaraco.path.build(files, prefix=project) + + cmd = [venv.exe(), "-m", "pip", "install", + "--no-build-isolation", # required to force current version of setuptools + "-e", str(project)] + print(str(subprocess.check_output(cmd), "utf-8")) + + cmd = [venv.exe(), "-m", "mypkg"] + assert subprocess.check_output(cmd).strip() == b"3.14159.post0 Hello World" + + (project / "src/mypkg/data.txt").write_text("foobar") + (project / "src/mypkg/mod.py").write_text("x = 42") + assert subprocess.check_output(cmd).strip() == b"3.14159.post0 foobar 42" diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py index 906713f6..efcce924 100644 --- a/setuptools/tests/test_find_packages.py +++ b/setuptools/tests/test_find_packages.py @@ -1,4 +1,4 @@ -"""Tests for setuptools.find_packages().""" +"""Tests for automatic package discovery""" import os import sys import shutil @@ -9,6 +9,7 @@ import pytest from setuptools import find_packages from setuptools import find_namespace_packages +from setuptools.discovery import FlatLayoutPackageFinder # modeled after CPython's test.support.can_symlink @@ -178,3 +179,67 @@ class TestFindPackages: shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) packages = find_namespace_packages(self.dist_dir) self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) + + +class TestFlatLayoutPackageFinder: + EXAMPLES = { + "hidden-folders": ( + [".pkg/__init__.py", "pkg/__init__.py", "pkg/nested/file.txt"], + ["pkg", "pkg.nested"] + ), + "private-packages": ( + ["_pkg/__init__.py", "pkg/_private/__init__.py"], + ["pkg", "pkg._private"] + ), + "invalid-name": ( + ["invalid-pkg/__init__.py", "other.pkg/__init__.py", "yet,another/file.py"], + [] + ), + 
"docs": ( + ["pkg/__init__.py", "docs/conf.py", "docs/readme.rst"], + ["pkg"] + ), + "tests": ( + ["pkg/__init__.py", "tests/test_pkg.py", "tests/__init__.py"], + ["pkg"] + ), + "examples": ( + [ + "pkg/__init__.py", + "examples/__init__.py", + "examples/file.py" + "example/other_file.py", + # Sub-packages should always be fine + "pkg/example/__init__.py", + "pkg/examples/__init__.py", + ], + ["pkg", "pkg.examples", "pkg.example"] + ), + "tool-specific": ( + [ + "pkg/__init__.py", + "tasks/__init__.py", + "tasks/subpackage/__init__.py", + "fabfile/__init__.py", + "fabfile/subpackage/__init__.py", + # Sub-packages should always be fine + "pkg/tasks/__init__.py", + "pkg/fabfile/__init__.py", + ], + ["pkg", "pkg.tasks", "pkg.fabfile"] + ) + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_unwanted_directories_not_included(self, tmp_path, example): + files, expected_packages = self.EXAMPLES[example] + ensure_files(tmp_path, files) + found_packages = FlatLayoutPackageFinder.find(str(tmp_path)) + assert set(found_packages) == set(expected_packages) + + +def ensure_files(root_path, files): + for file in files: + path = root_path / file + path.parent.mkdir(parents=True, exist_ok=True) + path.touch() diff --git a/setuptools/tests/test_find_py_modules.py b/setuptools/tests/test_find_py_modules.py new file mode 100644 index 00000000..4ef68801 --- /dev/null +++ b/setuptools/tests/test_find_py_modules.py @@ -0,0 +1,81 @@ +"""Tests for automatic discovery of modules""" +import os + +import pytest + +from setuptools.discovery import FlatLayoutModuleFinder, ModuleFinder + +from .test_find_packages import ensure_files, has_symlink + + +class TestModuleFinder: + def find(self, path, *args, **kwargs): + return set(ModuleFinder.find(str(path), *args, **kwargs)) + + EXAMPLES = { + # circumstance: (files, kwargs, expected_modules) + "simple_folder": ( + ["file.py", "other.py"], + {}, # kwargs + ["file", "other"], + ), + "exclude": ( + ["file.py", "other.py"], + {"exclude": ["f*"]}, + ["other"], + ), + "include": ( + ["file.py", "fole.py", "other.py"], + {"include": ["f*"], "exclude": ["fo*"]}, + ["file"], + ), + "invalid-name": ( + ["my-file.py", "other.file.py"], + {}, + [] + ) + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_finder(self, tmp_path, example): + files, kwargs, expected_modules = self.EXAMPLES[example] + ensure_files(tmp_path, files) + assert self.find(tmp_path, **kwargs) == set(expected_modules) + + @pytest.mark.skipif(not has_symlink(), reason='Symlink support required') + def test_symlinked_packages_are_included(self, tmp_path): + src = "_myfiles/file.py" + ensure_files(tmp_path, [src]) + os.symlink(tmp_path / src, tmp_path / "link.py") + assert self.find(tmp_path) == {"link"} + + +class TestFlatLayoutModuleFinder: + def find(self, path, *args, **kwargs): + return set(FlatLayoutModuleFinder.find(str(path))) + + EXAMPLES = { + # circumstance: (files, expected_modules) + "hidden-files": ( + [".module.py"], + [] + ), + "private-modules": ( + ["_module.py"], + [] + ), + "common-names": ( + ["setup.py", "conftest.py", "test.py", "tests.py", "example.py", "mod.py"], + ["mod"] + ), + "tool-specific": ( + ["tasks.py", "fabfile.py", "noxfile.py", "dodo.py", "manage.py", "mod.py"], + ["mod"] + ) + } + + @pytest.mark.parametrize("example", EXAMPLES.keys()) + def test_unwanted_files_not_included(self, tmp_path, example): + files, expected_modules = self.EXAMPLES[example] + ensure_files(tmp_path, files) + assert self.find(tmp_path) == set(expected_modules) diff 
--git a/setuptools/tests/test_logging.py b/setuptools/tests/test_logging.py new file mode 100644 index 00000000..a5ddd56d --- /dev/null +++ b/setuptools/tests/test_logging.py @@ -0,0 +1,36 @@ +import logging + +import pytest + + +setup_py = """\ +from setuptools import setup + +setup( + name="test_logging", + version="0.0" +) +""" + + +@pytest.mark.parametrize( + "flag, expected_level", [("--dry-run", "INFO"), ("--verbose", "DEBUG")] +) +def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level): + """Make sure the correct verbosity level is set (issue #3038)""" + import setuptools # noqa: Import setuptools to monkeypatch distutils + import distutils # <- load distutils after all the patches take place + + logger = logging.Logger(__name__) + monkeypatch.setattr(logging, "root", logger) + unset_log_level = logger.getEffectiveLevel() + assert logging.getLevelName(unset_log_level) == "NOTSET" + + setup_script = tmp_path / "setup.py" + setup_script.write_text(setup_py) + dist = distutils.core.run_setup(setup_script, stop_after="init") + dist.script_args = [flag, "sdist"] + dist.parse_command_line() # <- where the log level is set + log_level = logger.getEffectiveLevel() + log_level_name = logging.getLevelName(log_level) + assert log_level_name == expected_level diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py index 66f46ad0..302cff73 100644 --- a/setuptools/tests/test_sdist.py +++ b/setuptools/tests/test_sdist.py @@ -10,7 +10,7 @@ from unittest import mock import pytest -import pkg_resources +from setuptools._importlib import metadata from setuptools import SetuptoolsDeprecationWarning from setuptools.command.sdist import sdist from setuptools.command.egg_info import manifest_maker @@ -529,7 +529,9 @@ def test_default_revctrl(): This interface must be maintained until Ubuntu 12.04 is no longer supported (by Setuptools). 
""" - ep_def = 'svn_cvs = setuptools.command.sdist:_default_revctrl' - ep = pkg_resources.EntryPoint.parse(ep_def) - res = ep.resolve() + ep, = metadata.EntryPoints._from_text(""" + [setuptools.file_finders] + svn_cvs = setuptools.command.sdist:_default_revctrl + """) + res = ep.load() assert hasattr(res, '__iter__') diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py index 42f8e18b..0640f49d 100644 --- a/setuptools/tests/test_setuptools.py +++ b/setuptools/tests/test_setuptools.py @@ -7,16 +7,23 @@ import distutils.cmd from distutils.errors import DistutilsOptionError from distutils.errors import DistutilsSetupError from distutils.core import Extension -from distutils.version import LooseVersion +from zipfile import ZipFile import pytest +from setuptools.extern.packaging import version + import setuptools import setuptools.dist import setuptools.depends as dep from setuptools.depends import Require +@pytest.fixture(autouse=True) +def isolated_dir(tmpdir_cwd): + yield + + def makeSetup(**args): """Return distribution from 'setup(**args)', without executing commands""" @@ -84,12 +91,12 @@ class TestDepends: assert req.name == 'Json' assert req.module == 'json' - assert req.requested_version == '1.0.3' + assert req.requested_version == version.Version('1.0.3') assert req.attribute == '__version__' assert req.full_name() == 'Json-1.0.3' from json import __version__ - assert req.get_version() == __version__ + assert str(req.get_version()) == __version__ assert req.version_ok('1.0.9') assert not req.version_ok('0.9.1') assert not req.version_ok('unknown') @@ -97,11 +104,6 @@ class TestDepends: assert req.is_present() assert req.is_current() - req = Require('Json 3000', '03000', 'json', format=LooseVersion) - assert req.is_present() - assert not req.is_current() - assert not req.version_ok('unknown') - req = Require('Do-what-I-mean', '1.0', 'd-w-i-m') assert not req.is_present() assert not req.is_current() @@ -293,3 +295,16 @@ def test_findall_missing_symlink(tmpdir, can_symlink): os.symlink('foo', 'bar') found = list(setuptools.findall()) assert found == [] + + +def test_its_own_wheel_does_not_contain_tests(setuptools_wheel): + with ZipFile(setuptools_wheel) as zipfile: + contents = [f.replace(os.sep, '/') for f in zipfile.namelist()] + + for member in contents: + assert '/tests/' not in member + + +def test_convert_path_deprecated(): + with pytest.warns(setuptools.SetuptoolsDeprecationWarning): + setuptools.convert_path('setuptools/tests') diff --git a/setuptools/tests/test_sphinx_upload_docs.py b/setuptools/tests/test_sphinx_upload_docs.py deleted file mode 100644 index cc5b8293..00000000 --- a/setuptools/tests/test_sphinx_upload_docs.py +++ /dev/null @@ -1,38 +0,0 @@ -import pytest - -from jaraco import path - -from setuptools.command.upload_docs import upload_docs -from setuptools.dist import Distribution - - -@pytest.fixture -def sphinx_doc_sample_project(tmpdir_cwd): - path.build({ - 'setup.py': 'from setuptools import setup; setup()', - 'build': { - 'docs': { - 'conf.py': 'project="test"', - 'index.rst': ".. 
toctree::\ - :maxdepth: 2\ - :caption: Contents:", - }, - }, - }) - - -@pytest.mark.usefixtures('sphinx_doc_sample_project') -class TestSphinxUploadDocs: - def test_sphinx_doc(self): - params = dict( - name='foo', - packages=['test'], - ) - dist = Distribution(params) - - cmd = upload_docs(dist) - - cmd.initialize_options() - assert cmd.upload_dir is None - assert cmd.has_sphinx() is True - cmd.finalize_options() diff --git a/setuptools/tests/test_test.py b/setuptools/tests/test_test.py index 8b8d9e6c..530474d7 100644 --- a/setuptools/tests/test_test.py +++ b/setuptools/tests/test_test.py @@ -10,7 +10,6 @@ from .textwrap import DALS @pytest.mark.usefixtures('tmpdir_cwd') def test_tests_are_run_once(capfd): params = dict( - name='foo', packages=['dummy'], ) files = { diff --git a/setuptools/tests/test_upload_docs.py b/setuptools/tests/test_upload_docs.py deleted file mode 100644 index 55978aad..00000000 --- a/setuptools/tests/test_upload_docs.py +++ /dev/null @@ -1,64 +0,0 @@ -import os -import zipfile -import contextlib - -import pytest -from jaraco import path - -from setuptools.command.upload_docs import upload_docs -from setuptools.dist import Distribution - -from .textwrap import DALS -from . import contexts - - -@pytest.fixture -def sample_project(tmpdir_cwd): - path.build({ - 'setup.py': DALS(""" - from setuptools import setup - - setup(name='foo') - """), - 'build': { - 'index.html': 'Hello world.', - 'empty': {}, - } - }) - - -@pytest.mark.usefixtures('sample_project') -@pytest.mark.usefixtures('user_override') -class TestUploadDocsTest: - def test_create_zipfile(self): - """ - Ensure zipfile creation handles common cases, including a folder - containing an empty folder. - """ - - dist = Distribution() - - cmd = upload_docs(dist) - cmd.target_dir = cmd.upload_dir = 'build' - with contexts.tempdir() as tmp_dir: - tmp_file = os.path.join(tmp_dir, 'foo.zip') - zip_file = cmd.create_zipfile(tmp_file) - - assert zipfile.is_zipfile(tmp_file) - - with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file: - assert zip_file.namelist() == ['index.html'] - - def test_build_multipart(self): - data = dict( - a="foo", - b="bar", - file=('file.txt', b'content'), - ) - body, content_type = upload_docs._build_multipart(data) - assert 'form-data' in content_type - assert "b'" not in content_type - assert 'b"' not in content_type - assert isinstance(body, bytes) - assert b'foo' in body - assert b'content' in body diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py index 00f5f185..65358543 100644 --- a/setuptools/tests/test_virtualenv.py +++ b/setuptools/tests/test_virtualenv.py @@ -1,139 +1,95 @@ -import glob import os import sys -import itertools +import subprocess +from urllib.request import urlopen +from urllib.error import URLError import pathlib import pytest -from pytest_fixture_config import yield_requires_config - -import pytest_virtualenv +from . import contexts from .textwrap import DALS from .test_easy_install import make_nspkg_sdist @pytest.fixture(autouse=True) -def pytest_virtualenv_works(virtualenv): +def pytest_virtualenv_works(venv): """ pytest_virtualenv may not work. if it doesn't, skip these tests. See #1284. 
""" - venv_prefix = virtualenv.run( - 'python -c "import sys; print(sys.prefix)"', - capture=True, - ).strip() + venv_prefix = venv.run(["python" , "-c", "import sys; print(sys.prefix)"]).strip() if venv_prefix == sys.prefix: pytest.skip("virtualenv is broken (see pypa/setuptools#1284)") -@yield_requires_config(pytest_virtualenv.CONFIG, ['virtualenv_executable']) -@pytest.fixture(scope='function') -def bare_virtualenv(): - """ Bare virtualenv (no pip/setuptools/wheel). - """ - with pytest_virtualenv.VirtualEnv(args=( - '--no-wheel', - '--no-pip', - '--no-setuptools', - )) as venv: - yield venv - - -def test_clean_env_install(bare_virtualenv, tmp_src): +def test_clean_env_install(venv_without_setuptools, setuptools_wheel): """ Check setuptools can be installed in a clean environment. """ - cmd = [bare_virtualenv.python, 'setup.py', 'install'] - bare_virtualenv.run(cmd, cd=tmp_src) - + cmd = ["python", "-m", "pip", "install", str(setuptools_wheel)] + venv_without_setuptools.run(cmd) -def _get_pip_versions(): - # This fixture will attempt to detect if tests are being run without - # network connectivity and if so skip some tests - network = True +def access_pypi(): + # Detect if tests are being run without connectivity if not os.environ.get('NETWORK_REQUIRED', False): # pragma: nocover try: - from urllib.request import urlopen - from urllib.error import URLError - except ImportError: - from urllib2 import urlopen, URLError # Python 2.7 compat - - try: urlopen('https://pypi.org', timeout=1) except URLError: # No network, disable most of these tests - network = False + return False - def mark(param, *marks): - if not isinstance(param, type(pytest.param(''))): - param = pytest.param(param) - return param._replace(marks=param.marks + marks) + return True - def skip_network(param): - return param if network else mark(param, pytest.mark.skip(reason="no network")) - network_versions = [ - mark('pip<20', pytest.mark.xfail(reason='pypa/pip#6599')), +@pytest.mark.skipif( + 'platform.python_implementation() == "PyPy"', + reason="https://github.com/pypa/setuptools/pull/2865#issuecomment-965834995", +) +@pytest.mark.skipif(not access_pypi(), reason="no network") +# ^-- Even when it is not necessary to install a different version of `pip` +# the build process will still try to download `wheel`, see #3147 and #2986. +@pytest.mark.parametrize( + 'pip_version', + [ + None, + pytest.param('pip<20', marks=pytest.mark.xfail(reason='pypa/pip#6599')), 'pip<20.1', 'pip<21', 'pip<22', - mark( + pytest.param( 'https://github.com/pypa/pip/archive/main.zip', - pytest.mark.skipif('sys.version_info < (3, 7)'), + marks=pytest.mark.xfail(reason='#2975'), ), ] - - versions = itertools.chain( - [None], - map(skip_network, network_versions) - ) - - return list(versions) - - -@pytest.mark.skipif( - 'platform.python_implementation() == "PyPy"', - reason="https://github.com/pypa/setuptools/pull/2865#issuecomment-965834995", ) -@pytest.mark.parametrize('pip_version', _get_pip_versions()) -def test_pip_upgrade_from_source(pip_version, tmp_src, virtualenv): +def test_pip_upgrade_from_source(pip_version, venv_without_setuptools, + setuptools_wheel, setuptools_sdist): """ Check pip can upgrade setuptools from source. 
""" - # Install pip/wheel, and remove setuptools (as it + # Install pip/wheel, in a venv without setuptools (as it # should not be needed for bootstraping from source) - if pip_version is None: - upgrade_pip = () - else: - upgrade_pip = ('python -m pip install -U "{pip_version}" --retries=1',) - virtualenv.run(' && '.join(( - 'pip uninstall -y setuptools', - 'pip install -U wheel', - ) + upgrade_pip).format(pip_version=pip_version)) - dist_dir = virtualenv.workspace - # Generate source distribution / wheel. - virtualenv.run(' && '.join(( - 'python setup.py -q sdist -d {dist}', - 'python setup.py -q bdist_wheel -d {dist}', - )).format(dist=dist_dir), cd=tmp_src) - sdist = glob.glob(os.path.join(dist_dir, '*.zip'))[0] - wheel = glob.glob(os.path.join(dist_dir, '*.whl'))[0] - # Then update from wheel. - virtualenv.run('pip install ' + wheel) + venv = venv_without_setuptools + venv.run(["pip", "install", "-U", "wheel"]) + if pip_version is not None: + venv.run(["python", "-m", "pip", "install", "-U", pip_version, "--retries=1"]) + with pytest.raises(subprocess.CalledProcessError): + # Meta-test to make sure setuptools is not installed + venv.run(["python", "-c", "import setuptools"]) + + # Then install from wheel. + venv.run(["pip", "install", str(setuptools_wheel)]) # And finally try to upgrade from source. - virtualenv.run('pip install --no-cache-dir --upgrade ' + sdist) + venv.run(["pip", "install", "--no-cache-dir", "--upgrade", str(setuptools_sdist)]) -def _check_test_command_install_requirements(virtualenv, tmpdir, cwd): +def _check_test_command_install_requirements(venv, tmpdir): """ Check the test command will install all required dependencies. """ - # Install setuptools. - virtualenv.run('python setup.py develop', cd=cwd) - def sdist(distname, version): dist_path = tmpdir.join('%s-%s.tar.gz' % (distname, version)) make_nspkg_sdist(str(dist_path), distname, version) @@ -182,28 +138,24 @@ def _check_test_command_install_requirements(virtualenv, tmpdir, cwd): open('success', 'w').close() ''')) - # Run test command for test package. - # use 'virtualenv.python' as workaround for man-group/pytest-plugins#166 - cmd = [virtualenv.python, 'setup.py', 'test', '-s', 'test'] - virtualenv.run(cmd, cd=str(tmpdir)) + + cmd = ["python", 'setup.py', 'test', '-s', 'test'] + venv.run(cmd, cwd=str(tmpdir)) assert tmpdir.join('success').check() -def test_test_command_install_requirements(virtualenv, tmpdir, request): +def test_test_command_install_requirements(venv, tmpdir, tmpdir_cwd): # Ensure pip/wheel packages are installed. - virtualenv.run( - "python -c \"__import__('pkg_resources').require(['pip', 'wheel'])\"") - # uninstall setuptools so that 'setup.py develop' works - virtualenv.run("python -m pip uninstall -y setuptools") + venv.run(["python", "-c", "__import__('pkg_resources').require(['pip', 'wheel'])"]) # disable index URL so bits and bobs aren't requested from PyPI - virtualenv.env['PIP_NO_INDEX'] = '1' - _check_test_command_install_requirements(virtualenv, tmpdir, request.config.rootdir) + with contexts.environment(PYTHONPATH=None, PIP_NO_INDEX="1"): + _check_test_command_install_requirements(venv, tmpdir) -def test_no_missing_dependencies(bare_virtualenv, request): +def test_no_missing_dependencies(bare_venv, request): """ Quick and dirty test to ensure all external dependencies are vendored. 
""" + setuptools_dir = request.config.rootdir for command in ('upload',): # sorted(distutils.command.__all__): - cmd = [bare_virtualenv.python, 'setup.py', command, '-h'] - bare_virtualenv.run(cmd, cd=request.config.rootdir) + bare_venv.run(['python', 'setup.py', command, '-h'], cwd=setuptools_dir) diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py index 7345b135..89d65d0b 100644 --- a/setuptools/tests/test_wheel.py +++ b/setuptools/tests/test_wheel.py @@ -6,6 +6,8 @@ from distutils.sysconfig import get_config_var from distutils.util import get_platform import contextlib +import pathlib +import stat import glob import inspect import os @@ -148,6 +150,7 @@ def _check_wheel_install(filename, install_dir, install_tree_includes, if requires_txt is None: assert not dist.has_metadata('requires.txt') else: + # Order must match to ensure reproducibility. assert requires_txt == dist.get_metadata('requires.txt').lstrip() @@ -420,6 +423,38 @@ WHEEL_INSTALL_TESTS = ( ), dict( + id='requires_ensure_order', + install_requires=''' + foo + bar + baz + qux + ''', + extras_require={ + 'extra': ''' + foobar>3 + barbaz>4 + bazqux>5 + quxzap>6 + ''', + }, + requires_txt=DALS( + ''' + foo + bar + baz + qux + + [extra] + foobar>3 + barbaz>4 + bazqux>5 + quxzap>6 + ''' + ), + ), + + dict( id='namespace_package', file_defs={ 'foo': { @@ -581,3 +616,88 @@ def test_wheel_is_compatible(monkeypatch): monkeypatch.setattr('setuptools.wheel.sys_tags', sys_tags) assert Wheel( 'onnxruntime-0.1.2-cp36-cp36m-manylinux1_x86_64.whl').is_compatible() + + +def test_wheel_mode(): + @contextlib.contextmanager + def build_wheel(extra_file_defs=None, **kwargs): + file_defs = { + 'setup.py': (DALS( + ''' + # -*- coding: utf-8 -*- + from setuptools import setup + import setuptools + setup(**%r) + ''' + ) % kwargs).encode('utf-8'), + } + if extra_file_defs: + file_defs.update(extra_file_defs) + with tempdir() as source_dir: + path.build(file_defs, source_dir) + runsh = pathlib.Path(source_dir) / "script.sh" + os.chmod(runsh, 0o777) + subprocess.check_call((sys.executable, 'setup.py', + '-q', 'bdist_wheel'), cwd=source_dir) + yield glob.glob(os.path.join(source_dir, 'dist', '*.whl'))[0] + + params = dict( + id='script', + file_defs={ + 'script.py': DALS( + ''' + #/usr/bin/python + print('hello world!') + ''' + ), + 'script.sh': DALS( + ''' + #/bin/sh + echo 'hello world!' + ''' + ), + }, + setup_kwargs=dict( + scripts=['script.py', 'script.sh'], + ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt', + {'scripts': [ + 'script.py', + 'script.sh' + ]} + + ] + } + }) + ) + + project_name = params.get('name', 'foo') + version = params.get('version', '1.0') + install_tree = params.get('install_tree') + file_defs = params.get('file_defs', {}) + setup_kwargs = params.get('setup_kwargs', {}) + + with build_wheel( + name=project_name, + version=version, + install_requires=[], + extras_require={}, + extra_file_defs=file_defs, + **setup_kwargs + ) as filename, tempdir() as install_dir: + _check_wheel_install(filename, install_dir, + install_tree, project_name, + version, None) + w = Wheel(filename) + base = pathlib.Path(install_dir) / w.egg_name() + script_sh = base / "EGG-INFO" / "scripts" / "script.sh" + assert script_sh.exists() + if sys.platform != 'win32': + # Editable file mode has no effect on Windows + assert oct(stat.S_IMODE(script_sh.stat().st_mode)) == "0o777" |