Diffstat (limited to 'setuptools/tests')
52 files changed, 9173 insertions, 0 deletions
diff --git a/setuptools/tests/__init__.py b/setuptools/tests/__init__.py new file mode 100644 index 00000000..564adf2b --- /dev/null +++ b/setuptools/tests/__init__.py @@ -0,0 +1,10 @@ +import locale + +import pytest + + +__all__ = ['fail_on_ascii'] + + +is_ascii = locale.getpreferredencoding() == 'ANSI_X3.4-1968' +fail_on_ascii = pytest.mark.xfail(is_ascii, reason="Test fails in this locale") diff --git a/setuptools/tests/contexts.py b/setuptools/tests/contexts.py new file mode 100644 index 00000000..51ce8984 --- /dev/null +++ b/setuptools/tests/contexts.py @@ -0,0 +1,98 @@ +import tempfile +import os +import shutil +import sys +import contextlib +import site +import io + +import pkg_resources + + +@contextlib.contextmanager +def tempdir(cd=lambda dir: None, **kwargs): + temp_dir = tempfile.mkdtemp(**kwargs) + orig_dir = os.getcwd() + try: + cd(temp_dir) + yield temp_dir + finally: + cd(orig_dir) + shutil.rmtree(temp_dir) + + +@contextlib.contextmanager +def environment(**replacements): + """ + In a context, patch the environment with replacements. Pass None values + to clear the values. + """ + saved = dict( + (key, os.environ[key]) + for key in replacements + if key in os.environ + ) + + # remove values that are null + remove = (key for (key, value) in replacements.items() if value is None) + for key in list(remove): + os.environ.pop(key, None) + replacements.pop(key) + + os.environ.update(replacements) + + try: + yield saved + finally: + for key in replacements: + os.environ.pop(key, None) + os.environ.update(saved) + + +@contextlib.contextmanager +def quiet(): + """ + Redirect stdout/stderr to StringIO objects to prevent console output from + distutils commands. + """ + + old_stdout = sys.stdout + old_stderr = sys.stderr + new_stdout = sys.stdout = io.StringIO() + new_stderr = sys.stderr = io.StringIO() + try: + yield new_stdout, new_stderr + finally: + new_stdout.seek(0) + new_stderr.seek(0) + sys.stdout = old_stdout + sys.stderr = old_stderr + + +@contextlib.contextmanager +def save_user_site_setting(): + saved = site.ENABLE_USER_SITE + try: + yield saved + finally: + site.ENABLE_USER_SITE = saved + + +@contextlib.contextmanager +def save_pkg_resources_state(): + pr_state = pkg_resources.__getstate__() + # also save sys.path + sys_path = sys.path[:] + try: + yield pr_state, sys_path + finally: + sys.path[:] = sys_path + pkg_resources.__setstate__(pr_state) + + +@contextlib.contextmanager +def suppress_exceptions(*excs): + try: + yield + except excs: + pass diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py new file mode 100644 index 00000000..c0274c33 --- /dev/null +++ b/setuptools/tests/environment.py @@ -0,0 +1,62 @@ +import os +import sys +import unicodedata + +from subprocess import Popen as _Popen, PIPE as _PIPE + + +def _which_dirs(cmd): + result = set() + for path in os.environ.get('PATH', '').split(os.pathsep): + filename = os.path.join(path, cmd) + if os.access(filename, os.X_OK): + result.add(path) + return result + + +def run_setup_py(cmd, pypath=None, path=None, + data_stream=0, env=None): + """ + Execution command for tests, separate from those used by the + code directly to prevent accidental behavior issues + """ + if env is None: + env = dict() + for envname in os.environ: + env[envname] = os.environ[envname] + + # override the python path if needed + if pypath is not None: + env["PYTHONPATH"] = pypath + + # override the execution path if needed + if path is not None: + env["PATH"] = path + if not env.get("PATH", ""): + env["PATH"] = 
_which_dirs("tar").union(_which_dirs("gzip")) + env["PATH"] = os.pathsep.join(env["PATH"]) + + cmd = [sys.executable, "setup.py"] + list(cmd) + + # http://bugs.python.org/issue8557 + shell = sys.platform == 'win32' + + try: + proc = _Popen( + cmd, stdout=_PIPE, stderr=_PIPE, shell=shell, env=env, + ) + + if isinstance(data_stream, tuple): + data_stream = slice(*data_stream) + data = proc.communicate()[data_stream] + except OSError: + return 1, '' + + # decode the console string if needed + if hasattr(data, "decode"): + # use the default encoding + data = data.decode() + data = unicodedata.normalize('NFC', data) + + # communicate calls wait() + return proc.returncode, data diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py new file mode 100644 index 00000000..a5a172e0 --- /dev/null +++ b/setuptools/tests/fixtures.py @@ -0,0 +1,74 @@ +import contextlib +import sys +import shutil +import subprocess + +import pytest + +from . import contexts + + +@pytest.fixture +def user_override(monkeypatch): + """ + Override site.USER_BASE and site.USER_SITE with temporary directories in + a context. + """ + with contexts.tempdir() as user_base: + monkeypatch.setattr('site.USER_BASE', user_base) + with contexts.tempdir() as user_site: + monkeypatch.setattr('site.USER_SITE', user_site) + with contexts.save_user_site_setting(): + yield + + +@pytest.fixture +def tmpdir_cwd(tmpdir): + with tmpdir.as_cwd() as orig: + yield orig + + +@pytest.fixture +def tmp_src(request, tmp_path): + """Make a copy of the source dir under `$tmp/src`. + + This fixture is useful whenever it's necessary to run `setup.py` + or `pip install` against the source directory when there's no + control over the number of simultaneous invocations. Such + concurrent runs create and delete directories with the same names + under the target directory and so they influence each other's runs + when they are not being executed sequentially. + """ + tmp_src_path = tmp_path / 'src' + shutil.copytree(request.config.rootdir, tmp_src_path) + return tmp_src_path + + +@pytest.fixture(autouse=True, scope="session") +def workaround_xdist_376(request): + """ + Workaround pytest-dev/pytest-xdist#376 + + ``pytest-xdist`` tends to inject '' into ``sys.path``, + which may break certain isolation expectations. + Remove the entry so the import + machinery behaves the same irrespective of xdist. + """ + if not request.config.pluginmanager.has_plugin('xdist'): + return + + with contextlib.suppress(ValueError): + sys.path.remove('') + + +@pytest.fixture +def sample_project(tmp_path): + """ + Clone the 'sampleproject' and return a path to it. 
+ """ + cmd = ['git', 'clone', 'https://github.com/pypa/sampleproject'] + try: + subprocess.check_call(cmd, cwd=str(tmp_path)) + except Exception: + pytest.skip("Unable to clone sampleproject") + return tmp_path / 'sampleproject' diff --git a/setuptools/tests/indexes/test_links_priority/external.html b/setuptools/tests/indexes/test_links_priority/external.html new file mode 100644 index 00000000..92e4702f --- /dev/null +++ b/setuptools/tests/indexes/test_links_priority/external.html @@ -0,0 +1,3 @@ +<html><body> +<a href="/foobar-0.1.tar.gz#md5=1__bad_md5___">bad old link</a> +</body></html> diff --git a/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html b/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html new file mode 100644 index 00000000..fefb028b --- /dev/null +++ b/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html @@ -0,0 +1,4 @@ +<html><body> +<a href="/foobar-0.1.tar.gz#md5=0_correct_md5">foobar-0.1.tar.gz</a><br/> +<a href="../../external.html" rel="homepage">external homepage</a><br/> +</body></html> diff --git a/setuptools/tests/mod_with_constant.py b/setuptools/tests/mod_with_constant.py new file mode 100644 index 00000000..ef755dd1 --- /dev/null +++ b/setuptools/tests/mod_with_constant.py @@ -0,0 +1 @@ +value = 'three, sir!' diff --git a/setuptools/tests/namespaces.py b/setuptools/tests/namespaces.py new file mode 100644 index 00000000..245cf8ea --- /dev/null +++ b/setuptools/tests/namespaces.py @@ -0,0 +1,40 @@ +import textwrap + + +def build_namespace_package(tmpdir, name): + src_dir = tmpdir / name + src_dir.mkdir() + setup_py = src_dir / 'setup.py' + namespace, sep, rest = name.partition('.') + script = textwrap.dedent(""" + import setuptools + setuptools.setup( + name={name!r}, + version="1.0", + namespace_packages=[{namespace!r}], + packages=[{namespace!r}], + ) + """).format(**locals()) + setup_py.write_text(script, encoding='utf-8') + ns_pkg_dir = src_dir / namespace + ns_pkg_dir.mkdir() + pkg_init = ns_pkg_dir / '__init__.py' + tmpl = '__import__("pkg_resources").declare_namespace({namespace!r})' + decl = tmpl.format(**locals()) + pkg_init.write_text(decl, encoding='utf-8') + pkg_mod = ns_pkg_dir / (rest + '.py') + some_functionality = 'name = {rest!r}'.format(**locals()) + pkg_mod.write_text(some_functionality, encoding='utf-8') + return src_dir + + +def make_site_dir(target): + """ + Add a sitecustomize.py module in target to cause + target to be added to site dirs such that .pth files + are processed there. + """ + sc = target / 'sitecustomize.py' + target_str = str(target) + tmpl = '__import__("site").addsitedir({target_str!r})' + sc.write_text(tmpl.format(**locals()), encoding='utf-8') diff --git a/setuptools/tests/requirements.txt b/setuptools/tests/requirements.txt new file mode 100644 index 00000000..b2d84a94 --- /dev/null +++ b/setuptools/tests/requirements.txt @@ -0,0 +1,14 @@ +mock +pytest-flake8 +flake8-2020; python_version>="3.6" +virtualenv>=13.0.0 +pytest-virtualenv>=1.2.7 +pytest>=3.7 +wheel +coverage>=4.5.1 +pytest-cov>=2.5.1 +paver; python_version>="3.6" +futures; python_version=="2.7" +pip>=19.1 # For proper file:// URLs support. 
+jaraco.envs +sphinx diff --git a/setuptools/tests/script-with-bom.py b/setuptools/tests/script-with-bom.py new file mode 100644 index 00000000..22dee0d2 --- /dev/null +++ b/setuptools/tests/script-with-bom.py @@ -0,0 +1,3 @@ +# -*- coding: utf-8 -*- + +result = 'passed' diff --git a/setuptools/tests/server.py b/setuptools/tests/server.py new file mode 100644 index 00000000..6717c053 --- /dev/null +++ b/setuptools/tests/server.py @@ -0,0 +1,90 @@ +"""Basic http server for tests to simulate PyPI or custom indexes +""" + +import os +import time +import threading +import http.server +import urllib.parse +import urllib.request + + +class IndexServer(http.server.HTTPServer): + """Basic single-threaded http server simulating a package index + + You can use this server in unittest like this:: + s = IndexServer() + s.start() + index_url = s.base_url() + 'mytestindex' + # do some test requests to the index + # The index files should be located in setuptools/tests/indexes + s.stop() + """ + + def __init__( + self, server_address=('', 0), + RequestHandlerClass=http.server.SimpleHTTPRequestHandler): + http.server.HTTPServer.__init__( + self, server_address, RequestHandlerClass) + self._run = True + + def start(self): + self.thread = threading.Thread(target=self.serve_forever) + self.thread.start() + + def stop(self): + "Stop the server" + + # Let the server finish the last request and wait for a new one. + time.sleep(0.1) + + self.shutdown() + self.thread.join() + self.socket.close() + + def base_url(self): + port = self.server_port + return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port + + +class RequestRecorder(http.server.BaseHTTPRequestHandler): + def do_GET(self): + requests = vars(self.server).setdefault('requests', []) + requests.append(self) + self.send_response(200, 'OK') + + +class MockServer(http.server.HTTPServer, threading.Thread): + """ + A simple HTTP Server that records the requests made to it. + """ + + def __init__( + self, server_address=('', 0), + RequestHandlerClass=RequestRecorder): + http.server.HTTPServer.__init__( + self, server_address, RequestHandlerClass) + threading.Thread.__init__(self) + self.daemon = True + self.requests = [] + + def run(self): + self.serve_forever() + + @property + def netloc(self): + return 'localhost:%s' % self.server_port + + @property + def url(self): + return 'http://%s/' % self.netloc + + +def path_to_url(path, authority=None): + """ Convert a path to a file: URL. """ + path = os.path.normpath(os.path.abspath(path)) + base = 'file:' + if authority is not None: + base += '//' + authority + url = urllib.parse.urljoin(base, urllib.request.pathname2url(path)) + return url diff --git a/setuptools/tests/test_archive_util.py b/setuptools/tests/test_archive_util.py new file mode 100644 index 00000000..7f996244 --- /dev/null +++ b/setuptools/tests/test_archive_util.py @@ -0,0 +1,38 @@ +# coding: utf-8 + +import tarfile +import io + +import pytest + +from setuptools import archive_util + + +@pytest.fixture +def tarfile_with_unicode(tmpdir): + """ + Create a tarfile containing only a file whose name is + a zero byte file called testimäge.png. 
+ """ + tarobj = io.BytesIO() + + with tarfile.open(fileobj=tarobj, mode="w:gz") as tgz: + data = b"" + + filename = "testimäge.png" + + t = tarfile.TarInfo(filename) + t.size = len(data) + + tgz.addfile(t, io.BytesIO(data)) + + target = tmpdir / 'unicode-pkg-1.0.tar.gz' + with open(str(target), mode='wb') as tf: + tf.write(tarobj.getvalue()) + return str(target) + + +@pytest.mark.xfail(reason="#710 and #712") +def test_unicode_files(tarfile_with_unicode, tmpdir): + target = tmpdir / 'out' + archive_util.unpack_archive(tarfile_with_unicode, str(target)) diff --git a/setuptools/tests/test_bdist_deprecations.py b/setuptools/tests/test_bdist_deprecations.py new file mode 100644 index 00000000..28482fd0 --- /dev/null +++ b/setuptools/tests/test_bdist_deprecations.py @@ -0,0 +1,27 @@ +"""develop tests +""" +import mock +import sys + +import pytest + +from setuptools.dist import Distribution +from setuptools import SetuptoolsDeprecationWarning + + +@pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only') +@mock.patch('distutils.command.bdist_rpm.bdist_rpm') +def test_bdist_rpm_warning(distutils_cmd): + dist = Distribution( + dict( + script_name='setup.py', + script_args=['bdist_rpm'], + name='foo', + py_modules=['hi'], + ) + ) + dist.parse_command_line() + with pytest.warns(SetuptoolsDeprecationWarning): + dist.run_commands() + + distutils_cmd.run.assert_called_once() diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py new file mode 100644 index 00000000..fb5b90b1 --- /dev/null +++ b/setuptools/tests/test_bdist_egg.py @@ -0,0 +1,66 @@ +"""develop tests +""" +import os +import re +import zipfile + +import pytest + +from setuptools.dist import Distribution + +from . import contexts + +SETUP_PY = """\ +from setuptools import setup + +setup(name='foo', py_modules=['hi']) +""" + + +@pytest.fixture(scope='function') +def setup_context(tmpdir): + with (tmpdir / 'setup.py').open('w') as f: + f.write(SETUP_PY) + with (tmpdir / 'hi.py').open('w') as f: + f.write('1\n') + with tmpdir.as_cwd(): + yield tmpdir + + +class Test: + def test_bdist_egg(self, setup_context, user_override): + dist = Distribution(dict( + script_name='setup.py', + script_args=['bdist_egg'], + name='foo', + py_modules=['hi'], + )) + os.makedirs(os.path.join('build', 'src')) + with contexts.quiet(): + dist.parse_command_line() + dist.run_commands() + + # let's see if we got our egg link at the right place + [content] = os.listdir('dist') + assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content) + + @pytest.mark.xfail( + os.environ.get('PYTHONDONTWRITEBYTECODE'), + reason="Byte code disabled", + ) + def test_exclude_source_files(self, setup_context, user_override): + dist = Distribution(dict( + script_name='setup.py', + script_args=['bdist_egg', '--exclude-source-files'], + name='foo', + py_modules=['hi'], + )) + with contexts.quiet(): + dist.parse_command_line() + dist.run_commands() + [dist_name] = os.listdir('dist') + dist_filename = os.path.join('dist', dist_name) + zip = zipfile.ZipFile(dist_filename) + names = list(zi.filename for zi in zip.filelist) + assert 'hi.pyc' in names + assert 'hi.py' not in names diff --git a/setuptools/tests/test_build_clib.py b/setuptools/tests/test_build_clib.py new file mode 100644 index 00000000..48bea2b4 --- /dev/null +++ b/setuptools/tests/test_build_clib.py @@ -0,0 +1,57 @@ +import pytest + +import mock +from distutils.errors import DistutilsSetupError +from setuptools.command.build_clib import build_clib +from setuptools.dist import Distribution + + 
+class TestBuildCLib: + @mock.patch( + 'setuptools.command.build_clib.newer_pairwise_group') + def test_build_libraries(self, mock_newer): + dist = Distribution() + cmd = build_clib(dist) + + # this will be a long section, just making sure all + # exceptions are properly raised + libs = [('example', {'sources': 'broken.c'})] + with pytest.raises(DistutilsSetupError): + cmd.build_libraries(libs) + + obj_deps = 'some_string' + libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})] + with pytest.raises(DistutilsSetupError): + cmd.build_libraries(libs) + + obj_deps = {'': ''} + libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})] + with pytest.raises(DistutilsSetupError): + cmd.build_libraries(libs) + + obj_deps = {'source.c': ''} + libs = [('example', {'sources': ['source.c'], 'obj_deps': obj_deps})] + with pytest.raises(DistutilsSetupError): + cmd.build_libraries(libs) + + # with that out of the way, let's see if the crude dependency + # system works + cmd.compiler = mock.MagicMock(spec=cmd.compiler) + mock_newer.return_value = ([], []) + + obj_deps = {'': ('global.h',), 'example.c': ('example.h',)} + libs = [('example', {'sources': ['example.c'], 'obj_deps': obj_deps})] + + cmd.build_libraries(libs) + assert [['example.c', 'global.h', 'example.h']] in \ + mock_newer.call_args[0] + assert not cmd.compiler.compile.called + assert cmd.compiler.create_static_lib.call_count == 1 + + # reset the call numbers so we can test again + cmd.compiler.reset_mock() + + mock_newer.return_value = '' # anything as long as it's not ([],[]) + cmd.build_libraries(libs) + assert cmd.compiler.compile.call_count == 1 + assert cmd.compiler.create_static_lib.call_count == 1 diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py new file mode 100644 index 00000000..3177a2cd --- /dev/null +++ b/setuptools/tests/test_build_ext.py @@ -0,0 +1,150 @@ +import os +import sys +import distutils.command.build_ext as orig +from distutils.sysconfig import get_config_var + +from jaraco import path + +from setuptools.command.build_ext import build_ext, get_abi3_suffix +from setuptools.dist import Distribution +from setuptools.extension import Extension + +from . import environment +from .textwrap import DALS + + +IS_PYPY = '__pypy__' in sys.builtin_module_names + + +class TestBuildExt: + def test_get_ext_filename(self): + """ + Setuptools needs to give back the same + result as distutils, even if the fullname + is not in ext_map. + """ + dist = Distribution() + cmd = build_ext(dist) + cmd.ext_map['foo/bar'] = '' + res = cmd.get_ext_filename('foo') + wanted = orig.build_ext.get_ext_filename(cmd, 'foo') + assert res == wanted + + def test_abi3_filename(self): + """ + Filename needs to be loadable by several versions + of Python 3 if 'is_abi3' is truthy on Extension() + """ + print(get_abi3_suffix()) + + extension = Extension('spam.eggs', ['eggs.c'], py_limited_api=True) + dist = Distribution(dict(ext_modules=[extension])) + cmd = build_ext(dist) + cmd.finalize_options() + assert 'spam.eggs' in cmd.ext_map + res = cmd.get_ext_filename('spam.eggs') + + if not get_abi3_suffix(): + assert res.endswith(get_config_var('EXT_SUFFIX')) + elif sys.platform == 'win32': + assert res.endswith('eggs.pyd') + else: + assert 'abi3' in res + + def test_ext_suffix_override(self): + """ + SETUPTOOLS_EXT_SUFFIX variable always overrides + default extension options. 
+ """ + dist = Distribution() + cmd = build_ext(dist) + cmd.ext_map['for_abi3'] = ext = Extension( + 'for_abi3', + ['s.c'], + # Override shouldn't affect abi3 modules + py_limited_api=True, + ) + # Mock value needed to pass tests + ext._links_to_dynamic = False + + if not IS_PYPY: + expect = cmd.get_ext_filename('for_abi3') + else: + # PyPy builds do not use ABI3 tag, so they will + # also get the overridden suffix. + expect = 'for_abi3.test-suffix' + + try: + os.environ['SETUPTOOLS_EXT_SUFFIX'] = '.test-suffix' + res = cmd.get_ext_filename('normal') + assert 'normal.test-suffix' == res + res = cmd.get_ext_filename('for_abi3') + assert expect == res + finally: + del os.environ['SETUPTOOLS_EXT_SUFFIX'] + + +def test_build_ext_config_handling(tmpdir_cwd): + files = { + 'setup.py': DALS( + """ + from setuptools import Extension, setup + setup( + name='foo', + version='0.0.0', + ext_modules=[Extension('foo', ['foo.c'])], + ) + """), + 'foo.c': DALS( + """ + #include "Python.h" + + #if PY_MAJOR_VERSION >= 3 + + static struct PyModuleDef moduledef = { + PyModuleDef_HEAD_INIT, + "foo", + NULL, + 0, + NULL, + NULL, + NULL, + NULL, + NULL + }; + + #define INITERROR return NULL + + PyMODINIT_FUNC PyInit_foo(void) + + #else + + #define INITERROR return + + void initfoo(void) + + #endif + { + #if PY_MAJOR_VERSION >= 3 + PyObject *module = PyModule_Create(&moduledef); + #else + PyObject *module = Py_InitModule("extension", NULL); + #endif + if (module == NULL) + INITERROR; + #if PY_MAJOR_VERSION >= 3 + return module; + #endif + } + """), + 'setup.cfg': DALS( + """ + [build] + build_base = foo_build + """), + } + path.build(files) + code, output = environment.run_setup_py( + cmd=['build'], data_stream=(0, 2), + ) + assert code == 0, '\nSTDOUT:\n%s\nSTDERR:\n%s' % output diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py new file mode 100644 index 00000000..0f4a1a73 --- /dev/null +++ b/setuptools/tests/test_build_meta.py @@ -0,0 +1,472 @@ +import os +import shutil +import tarfile +import importlib +from concurrent import futures +import re + +import pytest +from jaraco import path + +from .textwrap import DALS + + +class BuildBackendBase: + def __init__(self, cwd='.', env={}, backend_name='setuptools.build_meta'): + self.cwd = cwd + self.env = env + self.backend_name = backend_name + + +class BuildBackend(BuildBackendBase): + """PEP 517 Build Backend""" + + def __init__(self, *args, **kwargs): + super(BuildBackend, self).__init__(*args, **kwargs) + self.pool = futures.ProcessPoolExecutor(max_workers=1) + + def __getattr__(self, name): + """Handles aribrary function invocations on the build backend.""" + + def method(*args, **kw): + root = os.path.abspath(self.cwd) + caller = BuildBackendCaller(root, self.env, self.backend_name) + return self.pool.submit(caller, name, *args, **kw).result() + + return method + + +class BuildBackendCaller(BuildBackendBase): + def __init__(self, *args, **kwargs): + super(BuildBackendCaller, self).__init__(*args, **kwargs) + + (self.backend_name, _, + self.backend_obj) = self.backend_name.partition(':') + + def __call__(self, name, *args, **kw): + """Handles aribrary function invocations on the build backend.""" + os.chdir(self.cwd) + os.environ.update(self.env) + mod = importlib.import_module(self.backend_name) + + if self.backend_obj: + backend = getattr(mod, self.backend_obj) + else: + backend = mod + + return getattr(backend, name)(*args, **kw) + + +defns = [ + { + 'setup.py': DALS(""" + __import__('setuptools').setup( + name='foo', + 
version='0.0.0', + py_modules=['hello'], + setup_requires=['six'], + ) + """), + 'hello.py': DALS(""" + def run(): + print('hello') + """), + }, + { + 'setup.py': DALS(""" + assert __name__ == '__main__' + __import__('setuptools').setup( + name='foo', + version='0.0.0', + py_modules=['hello'], + setup_requires=['six'], + ) + """), + 'hello.py': DALS(""" + def run(): + print('hello') + """), + }, + { + 'setup.py': DALS(""" + variable = True + def function(): + return variable + assert variable + __import__('setuptools').setup( + name='foo', + version='0.0.0', + py_modules=['hello'], + setup_requires=['six'], + ) + """), + 'hello.py': DALS(""" + def run(): + print('hello') + """), + }, + { + 'setup.cfg': DALS(""" + [metadata] + name = foo + version = 0.0.0 + + [options] + py_modules=hello + setup_requires=six + """), + 'hello.py': DALS(""" + def run(): + print('hello') + """) + }, +] + + +class TestBuildMetaBackend: + backend_name = 'setuptools.build_meta' + + def get_build_backend(self): + return BuildBackend(backend_name=self.backend_name) + + @pytest.fixture(params=defns) + def build_backend(self, tmpdir, request): + path.build(request.param, prefix=str(tmpdir)) + with tmpdir.as_cwd(): + yield self.get_build_backend() + + def test_get_requires_for_build_wheel(self, build_backend): + actual = build_backend.get_requires_for_build_wheel() + expected = ['six', 'wheel'] + assert sorted(actual) == sorted(expected) + + def test_get_requires_for_build_sdist(self, build_backend): + actual = build_backend.get_requires_for_build_sdist() + expected = ['six'] + assert sorted(actual) == sorted(expected) + + def test_build_wheel(self, build_backend): + dist_dir = os.path.abspath('pip-wheel') + os.makedirs(dist_dir) + wheel_name = build_backend.build_wheel(dist_dir) + + assert os.path.isfile(os.path.join(dist_dir, wheel_name)) + + @pytest.mark.parametrize('build_type', ('wheel', 'sdist')) + def test_build_with_existing_file_present(self, build_type, tmpdir_cwd): + # Building a sdist/wheel should still succeed if there's + # already a sdist/wheel in the destination directory. + files = { + 'setup.py': "from setuptools import setup\nsetup()", + 'VERSION': "0.0.1", + 'setup.cfg': DALS(""" + [metadata] + name = foo + version = file: VERSION + """), + 'pyproject.toml': DALS(""" + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + """), + } + + path.build(files) + + dist_dir = os.path.abspath('preexisting-' + build_type) + + build_backend = self.get_build_backend() + build_method = getattr(build_backend, 'build_' + build_type) + + # Build a first sdist/wheel. + # Note: this also check the destination directory is + # successfully created if it does not exist already. + first_result = build_method(dist_dir) + + # Change version. + with open("VERSION", "wt") as version_file: + version_file.write("0.0.2") + + # Build a *second* sdist/wheel. + second_result = build_method(dist_dir) + + assert os.path.isfile(os.path.join(dist_dir, first_result)) + assert first_result != second_result + + # And if rebuilding the exact same sdist/wheel? 
+ open(os.path.join(dist_dir, second_result), 'w').close() + third_result = build_method(dist_dir) + assert third_result == second_result + assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0 + + def test_build_sdist(self, build_backend): + dist_dir = os.path.abspath('pip-sdist') + os.makedirs(dist_dir) + sdist_name = build_backend.build_sdist(dist_dir) + + assert os.path.isfile(os.path.join(dist_dir, sdist_name)) + + def test_prepare_metadata_for_build_wheel(self, build_backend): + dist_dir = os.path.abspath('pip-dist-info') + os.makedirs(dist_dir) + + dist_info = build_backend.prepare_metadata_for_build_wheel(dist_dir) + + assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA')) + + def test_build_sdist_explicit_dist(self, build_backend): + # explicitly specifying the dist folder should work + # the folder sdist_directory and the ``--dist-dir`` can be the same + dist_dir = os.path.abspath('dist') + sdist_name = build_backend.build_sdist(dist_dir) + assert os.path.isfile(os.path.join(dist_dir, sdist_name)) + + def test_build_sdist_version_change(self, build_backend): + sdist_into_directory = os.path.abspath("out_sdist") + os.makedirs(sdist_into_directory) + + sdist_name = build_backend.build_sdist(sdist_into_directory) + assert os.path.isfile(os.path.join(sdist_into_directory, sdist_name)) + + # if the setup.py changes subsequent call of the build meta + # should still succeed, given the + # sdist_directory the frontend specifies is empty + setup_loc = os.path.abspath("setup.py") + if not os.path.exists(setup_loc): + setup_loc = os.path.abspath("setup.cfg") + + with open(setup_loc, 'rt') as file_handler: + content = file_handler.read() + with open(setup_loc, 'wt') as file_handler: + file_handler.write( + content.replace("version='0.0.0'", "version='0.0.1'")) + + shutil.rmtree(sdist_into_directory) + os.makedirs(sdist_into_directory) + + sdist_name = build_backend.build_sdist("out_sdist") + assert os.path.isfile( + os.path.join(os.path.abspath("out_sdist"), sdist_name)) + + def test_build_sdist_pyproject_toml_exists(self, tmpdir_cwd): + files = { + 'setup.py': DALS(""" + __import__('setuptools').setup( + name='foo', + version='0.0.0', + py_modules=['hello'] + )"""), + 'hello.py': '', + 'pyproject.toml': DALS(""" + [build-system] + requires = ["setuptools", "wheel"] + build-backend = "setuptools.build_meta" + """), + } + path.build(files) + build_backend = self.get_build_backend() + targz_path = build_backend.build_sdist("temp") + with tarfile.open(os.path.join("temp", targz_path)) as tar: + assert any('pyproject.toml' in name for name in tar.getnames()) + + def test_build_sdist_setup_py_exists(self, tmpdir_cwd): + # If build_sdist is called from a script other than setup.py, + # ensure setup.py is included + path.build(defns[0]) + + build_backend = self.get_build_backend() + targz_path = build_backend.build_sdist("temp") + with tarfile.open(os.path.join("temp", targz_path)) as tar: + assert any('setup.py' in name for name in tar.getnames()) + + def test_build_sdist_setup_py_manifest_excluded(self, tmpdir_cwd): + # Ensure that MANIFEST.in can exclude setup.py + files = { + 'setup.py': DALS(""" + __import__('setuptools').setup( + name='foo', + version='0.0.0', + py_modules=['hello'] + )"""), + 'hello.py': '', + 'MANIFEST.in': DALS(""" + exclude setup.py + """) + } + + path.build(files) + + build_backend = self.get_build_backend() + targz_path = build_backend.build_sdist("temp") + with tarfile.open(os.path.join("temp", targz_path)) as tar: + assert not any('setup.py' 
in name for name in tar.getnames()) + + def test_build_sdist_builds_targz_even_if_zip_indicated(self, tmpdir_cwd): + files = { + 'setup.py': DALS(""" + __import__('setuptools').setup( + name='foo', + version='0.0.0', + py_modules=['hello'] + )"""), + 'hello.py': '', + 'setup.cfg': DALS(""" + [sdist] + formats=zip + """) + } + + path.build(files) + + build_backend = self.get_build_backend() + build_backend.build_sdist("temp") + + _relative_path_import_files = { + 'setup.py': DALS(""" + __import__('setuptools').setup( + name='foo', + version=__import__('hello').__version__, + py_modules=['hello'] + )"""), + 'hello.py': '__version__ = "0.0.0"', + 'setup.cfg': DALS(""" + [sdist] + formats=zip + """) + } + + def test_build_sdist_relative_path_import(self, tmpdir_cwd): + path.build(self._relative_path_import_files) + build_backend = self.get_build_backend() + with pytest.raises(ImportError, match="^No module named 'hello'$"): + build_backend.build_sdist("temp") + + @pytest.mark.parametrize('setup_literal, requirements', [ + ("'foo'", ['foo']), + ("['foo']", ['foo']), + (r"'foo\n'", ['foo']), + (r"'foo\n\n'", ['foo']), + ("['foo', 'bar']", ['foo', 'bar']), + (r"'# Has a comment line\nfoo'", ['foo']), + (r"'foo # Has an inline comment'", ['foo']), + (r"'foo \\\n >=3.0'", ['foo>=3.0']), + (r"'foo\nbar'", ['foo', 'bar']), + (r"'foo\nbar\n'", ['foo', 'bar']), + (r"['foo\n', 'bar\n']", ['foo', 'bar']), + ]) + @pytest.mark.parametrize('use_wheel', [True, False]) + def test_setup_requires(self, setup_literal, requirements, use_wheel, + tmpdir_cwd): + + files = { + 'setup.py': DALS(""" + from setuptools import setup + + setup( + name="qux", + version="0.0.0", + py_modules=["hello"], + setup_requires={setup_literal}, + ) + """).format(setup_literal=setup_literal), + 'hello.py': DALS(""" + def run(): + print('hello') + """), + } + + path.build(files) + + build_backend = self.get_build_backend() + + if use_wheel: + base_requirements = ['wheel'] + get_requires = build_backend.get_requires_for_build_wheel + else: + base_requirements = [] + get_requires = build_backend.get_requires_for_build_sdist + + # Ensure that the build requirements are properly parsed + expected = sorted(base_requirements + requirements) + actual = get_requires() + + assert expected == sorted(actual) + + def test_dont_install_setup_requires(self, tmpdir_cwd): + files = { + 'setup.py': DALS(""" + from setuptools import setup + + setup( + name="qux", + version="0.0.0", + py_modules=["hello"], + setup_requires=["does-not-exist >99"], + ) + """), + 'hello.py': DALS(""" + def run(): + print('hello') + """), + } + + path.build(files) + + build_backend = self.get_build_backend() + + dist_dir = os.path.abspath('pip-dist-info') + os.makedirs(dist_dir) + + # does-not-exist can't be satisfied, so if it attempts to install + # setup_requires, it will fail. 
+ build_backend.prepare_metadata_for_build_wheel(dist_dir) + + _sys_argv_0_passthrough = { + 'setup.py': DALS(""" + import os + import sys + + __import__('setuptools').setup( + name='foo', + version='0.0.0', + ) + + sys_argv = os.path.abspath(sys.argv[0]) + file_path = os.path.abspath('setup.py') + assert sys_argv == file_path + """) + } + + def test_sys_argv_passthrough(self, tmpdir_cwd): + path.build(self._sys_argv_0_passthrough) + build_backend = self.get_build_backend() + with pytest.raises(AssertionError): + build_backend.build_sdist("temp") + + @pytest.mark.parametrize('build_hook', ('build_sdist', 'build_wheel')) + def test_build_with_empty_setuppy(self, build_backend, build_hook): + files = {'setup.py': ''} + path.build(files) + + with pytest.raises( + ValueError, + match=re.escape('No distribution was found.')): + getattr(build_backend, build_hook)("temp") + + +class TestBuildMetaLegacyBackend(TestBuildMetaBackend): + backend_name = 'setuptools.build_meta:__legacy__' + + # build_meta_legacy-specific tests + def test_build_sdist_relative_path_import(self, tmpdir_cwd): + # This must fail in build_meta, but must pass in build_meta_legacy + path.build(self._relative_path_import_files) + + build_backend = self.get_build_backend() + build_backend.build_sdist("temp") + + def test_sys_argv_passthrough(self, tmpdir_cwd): + path.build(self._sys_argv_0_passthrough) + + build_backend = self.get_build_backend() + build_backend.build_sdist("temp") diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py new file mode 100644 index 00000000..78a31ac4 --- /dev/null +++ b/setuptools/tests/test_build_py.py @@ -0,0 +1,84 @@ +import os +import stat +import shutil + +import pytest + +from setuptools.dist import Distribution + + +def test_directories_in_package_data_glob(tmpdir_cwd): + """ + Directories matching the glob in package_data should + not be included in the package data. + + Regression test for #261. + """ + dist = Distribution(dict( + script_name='setup.py', + script_args=['build_py'], + packages=[''], + name='foo', + package_data={'': ['path/*']}, + )) + os.makedirs('path/subpath') + dist.parse_command_line() + dist.run_commands() + + +def test_read_only(tmpdir_cwd): + """ + Ensure read-only flag is not preserved in copy + for package modules and package data, as that + causes problems with deleting read-only files on + Windows. + + #1451 + """ + dist = Distribution(dict( + script_name='setup.py', + script_args=['build_py'], + packages=['pkg'], + package_data={'pkg': ['data.dat']}, + name='pkg', + )) + os.makedirs('pkg') + open('pkg/__init__.py', 'w').close() + open('pkg/data.dat', 'w').close() + os.chmod('pkg/__init__.py', stat.S_IREAD) + os.chmod('pkg/data.dat', stat.S_IREAD) + dist.parse_command_line() + dist.run_commands() + shutil.rmtree('build') + + +@pytest.mark.xfail( + 'platform.system() == "Windows"', + reason="On Windows, files do not have executable bits", + raises=AssertionError, + strict=True, +) +def test_executable_data(tmpdir_cwd): + """ + Ensure executable bit is preserved in copy for + package data, as users rely on it for scripts. 
+ + #2041 + """ + dist = Distribution(dict( + script_name='setup.py', + script_args=['build_py'], + packages=['pkg'], + package_data={'pkg': ['run-me']}, + name='pkg', + )) + os.makedirs('pkg') + open('pkg/__init__.py', 'w').close() + open('pkg/run-me', 'w').close() + os.chmod('pkg/run-me', 0o700) + + dist.parse_command_line() + dist.run_commands() + + assert os.stat('build/lib/pkg/run-me').st_mode & stat.S_IEXEC, \ + "Script is not executable" diff --git a/setuptools/tests/test_config.py b/setuptools/tests/test_config.py new file mode 100644 index 00000000..005742e4 --- /dev/null +++ b/setuptools/tests/test_config.py @@ -0,0 +1,919 @@ +import types +import sys + +import contextlib +import configparser + +import pytest + +from distutils.errors import DistutilsOptionError, DistutilsFileError +from mock import patch +from setuptools.dist import Distribution, _Distribution +from setuptools.config import ConfigHandler, read_configuration +from distutils.core import Command +from .textwrap import DALS + + +class ErrConfigHandler(ConfigHandler): + """Erroneous handler. Fails to implement required methods.""" + + +def make_package_dir(name, base_dir, ns=False): + dir_package = base_dir + for dir_name in name.split('/'): + dir_package = dir_package.mkdir(dir_name) + init_file = None + if not ns: + init_file = dir_package.join('__init__.py') + init_file.write('') + return dir_package, init_file + + +def fake_env( + tmpdir, setup_cfg, setup_py=None, encoding='ascii', package_path='fake_package' +): + + if setup_py is None: + setup_py = 'from setuptools import setup\n' 'setup()\n' + + tmpdir.join('setup.py').write(setup_py) + config = tmpdir.join('setup.cfg') + config.write(setup_cfg.encode(encoding), mode='wb') + + package_dir, init_file = make_package_dir(package_path, tmpdir) + + init_file.write( + 'VERSION = (1, 2, 3)\n' + '\n' + 'VERSION_MAJOR = 1' + '\n' + 'def get_version():\n' + ' return [3, 4, 5, "dev"]\n' + '\n' + ) + + return package_dir, config + + +@contextlib.contextmanager +def get_dist(tmpdir, kwargs_initial=None, parse=True): + kwargs_initial = kwargs_initial or {} + + with tmpdir.as_cwd(): + dist = Distribution(kwargs_initial) + dist.script_name = 'setup.py' + parse and dist.parse_config_files() + + yield dist + + +def test_parsers_implemented(): + + with pytest.raises(NotImplementedError): + handler = ErrConfigHandler(None, {}) + handler.parsers + + +class TestConfigurationReader: + def test_basic(self, tmpdir): + _, config = fake_env( + tmpdir, + '[metadata]\n' + 'version = 10.1.1\n' + 'keywords = one, two\n' + '\n' + '[options]\n' + 'scripts = bin/a.py, bin/b.py\n', + ) + config_dict = read_configuration('%s' % config) + assert config_dict['metadata']['version'] == '10.1.1' + assert config_dict['metadata']['keywords'] == ['one', 'two'] + assert config_dict['options']['scripts'] == ['bin/a.py', 'bin/b.py'] + + def test_no_config(self, tmpdir): + with pytest.raises(DistutilsFileError): + read_configuration('%s' % tmpdir.join('setup.cfg')) + + def test_ignore_errors(self, tmpdir): + _, config = fake_env( + tmpdir, + '[metadata]\n' 'version = attr: none.VERSION\n' 'keywords = one, two\n', + ) + with pytest.raises(ImportError): + read_configuration('%s' % config) + + config_dict = read_configuration('%s' % config, ignore_option_errors=True) + + assert config_dict['metadata']['keywords'] == ['one', 'two'] + assert 'version' not in config_dict['metadata'] + + config.remove() + + +class TestMetadata: + def test_basic(self, tmpdir): + + fake_env( + tmpdir, + '[metadata]\n' + 'version = 
10.1.1\n' + 'description = Some description\n' + 'long_description_content_type = text/something\n' + 'long_description = file: README\n' + 'name = fake_name\n' + 'keywords = one, two\n' + 'provides = package, package.sub\n' + 'license = otherlic\n' + 'download_url = http://test.test.com/test/\n' + 'maintainer_email = test@test.com\n', + ) + + tmpdir.join('README').write('readme contents\nline2') + + meta_initial = { + # This will be used so `otherlic` won't replace it. + 'license': 'BSD 3-Clause License', + } + + with get_dist(tmpdir, meta_initial) as dist: + metadata = dist.metadata + + assert metadata.version == '10.1.1' + assert metadata.description == 'Some description' + assert metadata.long_description_content_type == 'text/something' + assert metadata.long_description == 'readme contents\nline2' + assert metadata.provides == ['package', 'package.sub'] + assert metadata.license == 'BSD 3-Clause License' + assert metadata.name == 'fake_name' + assert metadata.keywords == ['one', 'two'] + assert metadata.download_url == 'http://test.test.com/test/' + assert metadata.maintainer_email == 'test@test.com' + + def test_license_cfg(self, tmpdir): + fake_env( + tmpdir, + DALS( + """ + [metadata] + name=foo + version=0.0.1 + license=Apache 2.0 + """ + ), + ) + + with get_dist(tmpdir) as dist: + metadata = dist.metadata + + assert metadata.name == "foo" + assert metadata.version == "0.0.1" + assert metadata.license == "Apache 2.0" + + def test_file_mixed(self, tmpdir): + + fake_env( + tmpdir, + '[metadata]\n' 'long_description = file: README.rst, CHANGES.rst\n' '\n', + ) + + tmpdir.join('README.rst').write('readme contents\nline2') + tmpdir.join('CHANGES.rst').write('changelog contents\nand stuff') + + with get_dist(tmpdir) as dist: + assert dist.metadata.long_description == ( + 'readme contents\nline2\n' 'changelog contents\nand stuff' + ) + + def test_file_sandboxed(self, tmpdir): + + fake_env(tmpdir, '[metadata]\n' 'long_description = file: ../../README\n') + + with get_dist(tmpdir, parse=False) as dist: + with pytest.raises(DistutilsOptionError): + dist.parse_config_files() # file: out of sandbox + + def test_aliases(self, tmpdir): + + fake_env( + tmpdir, + '[metadata]\n' + 'author_email = test@test.com\n' + 'home_page = http://test.test.com/test/\n' + 'summary = Short summary\n' + 'platform = a, b\n' + 'classifier =\n' + ' Framework :: Django\n' + ' Programming Language :: Python :: 3.5\n', + ) + + with get_dist(tmpdir) as dist: + metadata = dist.metadata + assert metadata.author_email == 'test@test.com' + assert metadata.url == 'http://test.test.com/test/' + assert metadata.description == 'Short summary' + assert metadata.platforms == ['a', 'b'] + assert metadata.classifiers == [ + 'Framework :: Django', + 'Programming Language :: Python :: 3.5', + ] + + def test_multiline(self, tmpdir): + + fake_env( + tmpdir, + '[metadata]\n' + 'name = fake_name\n' + 'keywords =\n' + ' one\n' + ' two\n' + 'classifiers =\n' + ' Framework :: Django\n' + ' Programming Language :: Python :: 3.5\n', + ) + with get_dist(tmpdir) as dist: + metadata = dist.metadata + assert metadata.keywords == ['one', 'two'] + assert metadata.classifiers == [ + 'Framework :: Django', + 'Programming Language :: Python :: 3.5', + ] + + def test_dict(self, tmpdir): + + fake_env( + tmpdir, + '[metadata]\n' + 'project_urls =\n' + ' Link One = https://example.com/one/\n' + ' Link Two = https://example.com/two/\n', + ) + with get_dist(tmpdir) as dist: + metadata = dist.metadata + assert metadata.project_urls == { + 'Link One': 
'https://example.com/one/', + 'Link Two': 'https://example.com/two/', + } + + def test_version(self, tmpdir): + + package_dir, config = fake_env( + tmpdir, '[metadata]\n' 'version = attr: fake_package.VERSION\n' + ) + + sub_a = package_dir.mkdir('subpkg_a') + sub_a.join('__init__.py').write('') + sub_a.join('mod.py').write('VERSION = (2016, 11, 26)') + + sub_b = package_dir.mkdir('subpkg_b') + sub_b.join('__init__.py').write('') + sub_b.join('mod.py').write( + 'import third_party_module\n' 'VERSION = (2016, 11, 26)' + ) + + with get_dist(tmpdir) as dist: + assert dist.metadata.version == '1.2.3' + + config.write('[metadata]\n' 'version = attr: fake_package.get_version\n') + with get_dist(tmpdir) as dist: + assert dist.metadata.version == '3.4.5.dev' + + config.write('[metadata]\n' 'version = attr: fake_package.VERSION_MAJOR\n') + with get_dist(tmpdir) as dist: + assert dist.metadata.version == '1' + + config.write( + '[metadata]\n' 'version = attr: fake_package.subpkg_a.mod.VERSION\n' + ) + with get_dist(tmpdir) as dist: + assert dist.metadata.version == '2016.11.26' + + config.write( + '[metadata]\n' 'version = attr: fake_package.subpkg_b.mod.VERSION\n' + ) + with get_dist(tmpdir) as dist: + assert dist.metadata.version == '2016.11.26' + + def test_version_file(self, tmpdir): + + _, config = fake_env( + tmpdir, '[metadata]\n' 'version = file: fake_package/version.txt\n' + ) + tmpdir.join('fake_package', 'version.txt').write('1.2.3\n') + + with get_dist(tmpdir) as dist: + assert dist.metadata.version == '1.2.3' + + tmpdir.join('fake_package', 'version.txt').write('1.2.3\n4.5.6\n') + with pytest.raises(DistutilsOptionError): + with get_dist(tmpdir) as dist: + dist.metadata.version + + def test_version_with_package_dir_simple(self, tmpdir): + + _, config = fake_env( + tmpdir, + '[metadata]\n' + 'version = attr: fake_package_simple.VERSION\n' + '[options]\n' + 'package_dir =\n' + ' = src\n', + package_path='src/fake_package_simple', + ) + + with get_dist(tmpdir) as dist: + assert dist.metadata.version == '1.2.3' + + def test_version_with_package_dir_rename(self, tmpdir): + + _, config = fake_env( + tmpdir, + '[metadata]\n' + 'version = attr: fake_package_rename.VERSION\n' + '[options]\n' + 'package_dir =\n' + ' fake_package_rename = fake_dir\n', + package_path='fake_dir', + ) + + with get_dist(tmpdir) as dist: + assert dist.metadata.version == '1.2.3' + + def test_version_with_package_dir_complex(self, tmpdir): + + _, config = fake_env( + tmpdir, + '[metadata]\n' + 'version = attr: fake_package_complex.VERSION\n' + '[options]\n' + 'package_dir =\n' + ' fake_package_complex = src/fake_dir\n', + package_path='src/fake_dir', + ) + + with get_dist(tmpdir) as dist: + assert dist.metadata.version == '1.2.3' + + def test_unknown_meta_item(self, tmpdir): + + fake_env(tmpdir, '[metadata]\n' 'name = fake_name\n' 'unknown = some\n') + with get_dist(tmpdir, parse=False) as dist: + dist.parse_config_files() # Skip unknown. + + def test_usupported_section(self, tmpdir): + + fake_env(tmpdir, '[metadata.some]\n' 'key = val\n') + with get_dist(tmpdir, parse=False) as dist: + with pytest.raises(DistutilsOptionError): + dist.parse_config_files() + + def test_classifiers(self, tmpdir): + expected = set( + [ + 'Framework :: Django', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + ] + ) + + # From file. 
+ _, config = fake_env(tmpdir, '[metadata]\n' 'classifiers = file: classifiers\n') + + tmpdir.join('classifiers').write( + 'Framework :: Django\n' + 'Programming Language :: Python :: 3\n' + 'Programming Language :: Python :: 3.5\n' + ) + + with get_dist(tmpdir) as dist: + assert set(dist.metadata.classifiers) == expected + + # From list notation + config.write( + '[metadata]\n' + 'classifiers =\n' + ' Framework :: Django\n' + ' Programming Language :: Python :: 3\n' + ' Programming Language :: Python :: 3.5\n' + ) + with get_dist(tmpdir) as dist: + assert set(dist.metadata.classifiers) == expected + + def test_deprecated_config_handlers(self, tmpdir): + fake_env( + tmpdir, + '[metadata]\n' + 'version = 10.1.1\n' + 'description = Some description\n' + 'requires = some, requirement\n', + ) + + with pytest.deprecated_call(): + with get_dist(tmpdir) as dist: + metadata = dist.metadata + + assert metadata.version == '10.1.1' + assert metadata.description == 'Some description' + assert metadata.requires == ['some', 'requirement'] + + def test_interpolation(self, tmpdir): + fake_env(tmpdir, '[metadata]\n' 'description = %(message)s\n') + with pytest.raises(configparser.InterpolationMissingOptionError): + with get_dist(tmpdir): + pass + + def test_non_ascii_1(self, tmpdir): + fake_env(tmpdir, '[metadata]\n' 'description = éàïôñ\n', encoding='utf-8') + with get_dist(tmpdir): + pass + + def test_non_ascii_3(self, tmpdir): + fake_env(tmpdir, '\n' '# -*- coding: invalid\n') + with get_dist(tmpdir): + pass + + def test_non_ascii_4(self, tmpdir): + fake_env( + tmpdir, + '# -*- coding: utf-8\n' '[metadata]\n' 'description = éàïôñ\n', + encoding='utf-8', + ) + with get_dist(tmpdir) as dist: + assert dist.metadata.description == 'éàïôñ' + + def test_not_utf8(self, tmpdir): + """ + Config files encoded not in UTF-8 will fail + """ + fake_env( + tmpdir, + '# vim: set fileencoding=iso-8859-15 :\n' + '[metadata]\n' + 'description = éàïôñ\n', + encoding='iso-8859-15', + ) + with pytest.raises(UnicodeDecodeError): + with get_dist(tmpdir): + pass + + def test_warn_dash_deprecation(self, tmpdir): + # warn_dash_deprecation() is a method in setuptools.dist + # remove this test and the method when no longer needed + fake_env( + tmpdir, + '[metadata]\n' + 'author-email = test@test.com\n' + 'maintainer_email = foo@foo.com\n', + ) + msg = ( + "Usage of dash-separated 'author-email' will not be supported " + "in future versions. " + "Please use the underscore name 'author_email' instead" + ) + with pytest.warns(UserWarning, match=msg): + with get_dist(tmpdir) as dist: + metadata = dist.metadata + + assert metadata.author_email == 'test@test.com' + assert metadata.maintainer_email == 'foo@foo.com' + + def test_make_option_lowercase(self, tmpdir): + # remove this test and the method make_option_lowercase() in setuptools.dist + # when no longer needed + fake_env( + tmpdir, '[metadata]\n' 'Name = foo\n' 'description = Some description\n' + ) + msg = ( + "Usage of uppercase key 'Name' in 'metadata' will be deprecated in " + "future versions. 
" + "Please use lowercase 'name' instead" + ) + with pytest.warns(UserWarning, match=msg): + with get_dist(tmpdir) as dist: + metadata = dist.metadata + + assert metadata.name == 'foo' + assert metadata.description == 'Some description' + + +class TestOptions: + def test_basic(self, tmpdir): + + fake_env( + tmpdir, + '[options]\n' + 'zip_safe = True\n' + 'include_package_data = yes\n' + 'package_dir = b=c, =src\n' + 'packages = pack_a, pack_b.subpack\n' + 'namespace_packages = pack1, pack2\n' + 'scripts = bin/one.py, bin/two.py\n' + 'eager_resources = bin/one.py, bin/two.py\n' + 'install_requires = docutils>=0.3; pack ==1.1, ==1.3; hey\n' + 'tests_require = mock==0.7.2; pytest\n' + 'setup_requires = docutils>=0.3; spack ==1.1, ==1.3; there\n' + 'dependency_links = http://some.com/here/1, ' + 'http://some.com/there/2\n' + 'python_requires = >=1.0, !=2.8\n' + 'py_modules = module1, module2\n', + ) + with get_dist(tmpdir) as dist: + assert dist.zip_safe + assert dist.include_package_data + assert dist.package_dir == {'': 'src', 'b': 'c'} + assert dist.packages == ['pack_a', 'pack_b.subpack'] + assert dist.namespace_packages == ['pack1', 'pack2'] + assert dist.scripts == ['bin/one.py', 'bin/two.py'] + assert dist.dependency_links == ( + ['http://some.com/here/1', 'http://some.com/there/2'] + ) + assert dist.install_requires == ( + ['docutils>=0.3', 'pack==1.1,==1.3', 'hey'] + ) + assert dist.setup_requires == ( + ['docutils>=0.3', 'spack ==1.1, ==1.3', 'there'] + ) + assert dist.tests_require == ['mock==0.7.2', 'pytest'] + assert dist.python_requires == '>=1.0, !=2.8' + assert dist.py_modules == ['module1', 'module2'] + + def test_multiline(self, tmpdir): + fake_env( + tmpdir, + '[options]\n' + 'package_dir = \n' + ' b=c\n' + ' =src\n' + 'packages = \n' + ' pack_a\n' + ' pack_b.subpack\n' + 'namespace_packages = \n' + ' pack1\n' + ' pack2\n' + 'scripts = \n' + ' bin/one.py\n' + ' bin/two.py\n' + 'eager_resources = \n' + ' bin/one.py\n' + ' bin/two.py\n' + 'install_requires = \n' + ' docutils>=0.3\n' + ' pack ==1.1, ==1.3\n' + ' hey\n' + 'tests_require = \n' + ' mock==0.7.2\n' + ' pytest\n' + 'setup_requires = \n' + ' docutils>=0.3\n' + ' spack ==1.1, ==1.3\n' + ' there\n' + 'dependency_links = \n' + ' http://some.com/here/1\n' + ' http://some.com/there/2\n', + ) + with get_dist(tmpdir) as dist: + assert dist.package_dir == {'': 'src', 'b': 'c'} + assert dist.packages == ['pack_a', 'pack_b.subpack'] + assert dist.namespace_packages == ['pack1', 'pack2'] + assert dist.scripts == ['bin/one.py', 'bin/two.py'] + assert dist.dependency_links == ( + ['http://some.com/here/1', 'http://some.com/there/2'] + ) + assert dist.install_requires == ( + ['docutils>=0.3', 'pack==1.1,==1.3', 'hey'] + ) + assert dist.setup_requires == ( + ['docutils>=0.3', 'spack ==1.1, ==1.3', 'there'] + ) + assert dist.tests_require == ['mock==0.7.2', 'pytest'] + + def test_package_dir_fail(self, tmpdir): + fake_env(tmpdir, '[options]\n' 'package_dir = a b\n') + with get_dist(tmpdir, parse=False) as dist: + with pytest.raises(DistutilsOptionError): + dist.parse_config_files() + + def test_package_data(self, tmpdir): + fake_env( + tmpdir, + '[options.package_data]\n' + '* = *.txt, *.rst\n' + 'hello = *.msg\n' + '\n' + '[options.exclude_package_data]\n' + '* = fake1.txt, fake2.txt\n' + 'hello = *.dat\n', + ) + + with get_dist(tmpdir) as dist: + assert dist.package_data == { + '': ['*.txt', '*.rst'], + 'hello': ['*.msg'], + } + assert dist.exclude_package_data == { + '': ['fake1.txt', 'fake2.txt'], + 'hello': ['*.dat'], + } + + 
def test_packages(self, tmpdir): + fake_env(tmpdir, '[options]\n' 'packages = find:\n') + + with get_dist(tmpdir) as dist: + assert dist.packages == ['fake_package'] + + def test_find_directive(self, tmpdir): + dir_package, config = fake_env(tmpdir, '[options]\n' 'packages = find:\n') + + dir_sub_one, _ = make_package_dir('sub_one', dir_package) + dir_sub_two, _ = make_package_dir('sub_two', dir_package) + + with get_dist(tmpdir) as dist: + assert set(dist.packages) == set( + ['fake_package', 'fake_package.sub_two', 'fake_package.sub_one'] + ) + + config.write( + '[options]\n' + 'packages = find:\n' + '\n' + '[options.packages.find]\n' + 'where = .\n' + 'include =\n' + ' fake_package.sub_one\n' + ' two\n' + ) + with get_dist(tmpdir) as dist: + assert dist.packages == ['fake_package.sub_one'] + + config.write( + '[options]\n' + 'packages = find:\n' + '\n' + '[options.packages.find]\n' + 'exclude =\n' + ' fake_package.sub_one\n' + ) + with get_dist(tmpdir) as dist: + assert set(dist.packages) == set(['fake_package', 'fake_package.sub_two']) + + def test_find_namespace_directive(self, tmpdir): + dir_package, config = fake_env( + tmpdir, '[options]\n' 'packages = find_namespace:\n' + ) + + dir_sub_one, _ = make_package_dir('sub_one', dir_package) + dir_sub_two, _ = make_package_dir('sub_two', dir_package, ns=True) + + with get_dist(tmpdir) as dist: + assert set(dist.packages) == { + 'fake_package', + 'fake_package.sub_two', + 'fake_package.sub_one', + } + + config.write( + '[options]\n' + 'packages = find_namespace:\n' + '\n' + '[options.packages.find]\n' + 'where = .\n' + 'include =\n' + ' fake_package.sub_one\n' + ' two\n' + ) + with get_dist(tmpdir) as dist: + assert dist.packages == ['fake_package.sub_one'] + + config.write( + '[options]\n' + 'packages = find_namespace:\n' + '\n' + '[options.packages.find]\n' + 'exclude =\n' + ' fake_package.sub_one\n' + ) + with get_dist(tmpdir) as dist: + assert set(dist.packages) == {'fake_package', 'fake_package.sub_two'} + + def test_extras_require(self, tmpdir): + fake_env( + tmpdir, + '[options.extras_require]\n' + 'pdf = ReportLab>=1.2; RXP\n' + 'rest = \n' + ' docutils>=0.3\n' + ' pack ==1.1, ==1.3\n', + ) + + with get_dist(tmpdir) as dist: + assert dist.extras_require == { + 'pdf': ['ReportLab>=1.2', 'RXP'], + 'rest': ['docutils>=0.3', 'pack==1.1,==1.3'], + } + assert dist.metadata.provides_extras == set(['pdf', 'rest']) + + def test_dash_preserved_extras_require(self, tmpdir): + fake_env(tmpdir, '[options.extras_require]\n' 'foo-a = foo\n' 'foo_b = test\n') + + with get_dist(tmpdir) as dist: + assert dist.extras_require == {'foo-a': ['foo'], 'foo_b': ['test']} + + def test_entry_points(self, tmpdir): + _, config = fake_env( + tmpdir, + '[options.entry_points]\n' + 'group1 = point1 = pack.module:func, ' + '.point2 = pack.module2:func_rest [rest]\n' + 'group2 = point3 = pack.module:func2\n', + ) + + with get_dist(tmpdir) as dist: + assert dist.entry_points == { + 'group1': [ + 'point1 = pack.module:func', + '.point2 = pack.module2:func_rest [rest]', + ], + 'group2': ['point3 = pack.module:func2'], + } + + expected = ( + '[blogtool.parsers]\n' + '.rst = some.nested.module:SomeClass.some_classmethod[reST]\n' + ) + + tmpdir.join('entry_points').write(expected) + + # From file. 
+ config.write('[options]\n' 'entry_points = file: entry_points\n') + + with get_dist(tmpdir) as dist: + assert dist.entry_points == expected + + def test_case_sensitive_entry_points(self, tmpdir): + _, config = fake_env( + tmpdir, + '[options.entry_points]\n' + 'GROUP1 = point1 = pack.module:func, ' + '.point2 = pack.module2:func_rest [rest]\n' + 'group2 = point3 = pack.module:func2\n', + ) + + with get_dist(tmpdir) as dist: + assert dist.entry_points == { + 'GROUP1': [ + 'point1 = pack.module:func', + '.point2 = pack.module2:func_rest [rest]', + ], + 'group2': ['point3 = pack.module:func2'], + } + + def test_data_files(self, tmpdir): + fake_env( + tmpdir, + '[options.data_files]\n' + 'cfg =\n' + ' a/b.conf\n' + ' c/d.conf\n' + 'data = e/f.dat, g/h.dat\n', + ) + + with get_dist(tmpdir) as dist: + expected = [ + ('cfg', ['a/b.conf', 'c/d.conf']), + ('data', ['e/f.dat', 'g/h.dat']), + ] + assert sorted(dist.data_files) == sorted(expected) + + def test_data_files_globby(self, tmpdir): + fake_env( + tmpdir, + '[options.data_files]\n' + 'cfg =\n' + ' a/b.conf\n' + ' c/d.conf\n' + 'data = *.dat\n' + 'icons = \n' + ' *.ico\n' + 'audio = \n' + ' *.wav\n' + ' sounds.db\n' + ) + + # Create dummy files for glob()'s sake: + tmpdir.join('a.dat').write('') + tmpdir.join('b.dat').write('') + tmpdir.join('c.dat').write('') + tmpdir.join('a.ico').write('') + tmpdir.join('b.ico').write('') + tmpdir.join('c.ico').write('') + tmpdir.join('beep.wav').write('') + tmpdir.join('boop.wav').write('') + tmpdir.join('sounds.db').write('') + + with get_dist(tmpdir) as dist: + expected = [ + ('cfg', ['a/b.conf', 'c/d.conf']), + ('data', ['a.dat', 'b.dat', 'c.dat']), + ('icons', ['a.ico', 'b.ico', 'c.ico']), + ('audio', ['beep.wav', 'boop.wav', 'sounds.db']), + ] + assert sorted(dist.data_files) == sorted(expected) + + def test_python_requires_simple(self, tmpdir): + fake_env( + tmpdir, + DALS( + """ + [options] + python_requires=>=2.7 + """ + ), + ) + with get_dist(tmpdir) as dist: + dist.parse_config_files() + + def test_python_requires_compound(self, tmpdir): + fake_env( + tmpdir, + DALS( + """ + [options] + python_requires=>=2.7,!=3.0.* + """ + ), + ) + with get_dist(tmpdir) as dist: + dist.parse_config_files() + + def test_python_requires_invalid(self, tmpdir): + fake_env( + tmpdir, + DALS( + """ + [options] + python_requires=invalid + """ + ), + ) + with pytest.raises(Exception): + with get_dist(tmpdir) as dist: + dist.parse_config_files() + + def test_cmdclass(self, tmpdir): + class CustomCmd(Command): + pass + + m = types.ModuleType('custom_build', 'test package') + + m.__dict__['CustomCmd'] = CustomCmd + + sys.modules['custom_build'] = m + + fake_env( + tmpdir, + '[options]\n' 'cmdclass =\n' ' customcmd = custom_build.CustomCmd\n', + ) + + with get_dist(tmpdir) as dist: + assert dist.cmdclass == {'customcmd': CustomCmd} + + +saved_dist_init = _Distribution.__init__ + + +class TestExternalSetters: + # During creation of the setuptools Distribution() object, we call + # the init of the parent distutils Distribution object via + # _Distribution.__init__ (). + # + # It's possible distutils calls out to various keyword + # implementations (i.e. distutils.setup_keywords entry points) + # that may set a range of variables. + # + # This wraps distutil's Distribution.__init__ and simulates + # pbr or something else setting these values. 
+ def _fake_distribution_init(self, dist, attrs): + saved_dist_init(dist, attrs) + # see self._DISTUTILS_UNSUPPORTED_METADATA + setattr(dist.metadata, 'long_description_content_type', 'text/something') + # Test overwrite setup() args + setattr( + dist.metadata, + 'project_urls', + { + 'Link One': 'https://example.com/one/', + 'Link Two': 'https://example.com/two/', + }, + ) + return None + + @patch.object(_Distribution, '__init__', autospec=True) + def test_external_setters(self, mock_parent_init, tmpdir): + mock_parent_init.side_effect = self._fake_distribution_init + + dist = Distribution(attrs={'project_urls': {'will_be': 'ignored'}}) + + assert dist.metadata.long_description_content_type == 'text/something' + assert dist.metadata.project_urls == { + 'Link One': 'https://example.com/one/', + 'Link Two': 'https://example.com/two/', + } diff --git a/setuptools/tests/test_dep_util.py b/setuptools/tests/test_dep_util.py new file mode 100644 index 00000000..e5027c10 --- /dev/null +++ b/setuptools/tests/test_dep_util.py @@ -0,0 +1,30 @@ +from setuptools.dep_util import newer_pairwise_group +import os +import pytest + + +@pytest.fixture +def groups_target(tmpdir): + """Sets up some older sources, a target and newer sources. + Returns a 3-tuple in this order. + """ + creation_order = ['older.c', 'older.h', 'target.o', 'newer.c', 'newer.h'] + mtime = 0 + + for i in range(len(creation_order)): + creation_order[i] = os.path.join(str(tmpdir), creation_order[i]) + with open(creation_order[i], 'w'): + pass + + # make sure modification times are sequential + os.utime(creation_order[i], (mtime, mtime)) + mtime += 1 + + return creation_order[:2], creation_order[2], creation_order[3:] + + +def test_newer_pairwise_group(groups_target): + older = newer_pairwise_group([groups_target[0]], [groups_target[1]]) + newer = newer_pairwise_group([groups_target[2]], [groups_target[1]]) + assert older == ([], []) + assert newer == ([groups_target[2]], [groups_target[1]]) diff --git a/setuptools/tests/test_depends.py b/setuptools/tests/test_depends.py new file mode 100644 index 00000000..bff1dfb1 --- /dev/null +++ b/setuptools/tests/test_depends.py @@ -0,0 +1,16 @@ +import sys + +from setuptools import depends + + +class TestGetModuleConstant: + + def test_basic(self): + """ + Invoke get_module_constant on a module in + the test package. + """ + mod_name = 'setuptools.tests.mod_with_constant' + val = depends.get_module_constant(mod_name, 'value') + assert val == 'three, sir!' + assert 'setuptools.tests.mod_with_constant' not in sys.modules diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py new file mode 100644 index 00000000..70c5794c --- /dev/null +++ b/setuptools/tests/test_develop.py @@ -0,0 +1,224 @@ +"""develop tests +""" + +import os +import sys +import subprocess +import platform +import pathlib +import textwrap + +from setuptools.command import test + +import pytest + +from setuptools.command.develop import develop +from setuptools.dist import Distribution +from . import contexts +from . 
import namespaces + +SETUP_PY = """\ +from setuptools import setup + +setup(name='foo', + packages=['foo'], +) +""" + +INIT_PY = """print "foo" +""" + + +@pytest.fixture +def temp_user(monkeypatch): + with contexts.tempdir() as user_base: + with contexts.tempdir() as user_site: + monkeypatch.setattr('site.USER_BASE', user_base) + monkeypatch.setattr('site.USER_SITE', user_site) + yield + + +@pytest.fixture +def test_env(tmpdir, temp_user): + target = tmpdir + foo = target.mkdir('foo') + setup = target / 'setup.py' + if setup.isfile(): + raise ValueError(dir(target)) + with setup.open('w') as f: + f.write(SETUP_PY) + init = foo / '__init__.py' + with init.open('w') as f: + f.write(INIT_PY) + with target.as_cwd(): + yield target + + +class TestDevelop: + in_virtualenv = hasattr(sys, 'real_prefix') + in_venv = hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix + + def test_console_scripts(self, tmpdir): + """ + Test that console scripts are installed and that they reference + only the project by name and not the current version. + """ + pytest.skip( + "TODO: needs a fixture to cause 'develop' " + "to be invoked without mutating environment." + ) + settings = dict( + name='foo', + packages=['foo'], + version='0.0', + entry_points={ + 'console_scripts': [ + 'foocmd = foo:foo', + ], + }, + ) + dist = Distribution(settings) + dist.script_name = 'setup.py' + cmd = develop(dist) + cmd.ensure_finalized() + cmd.install_dir = tmpdir + cmd.run() + # assert '0.0' not in foocmd_text + + +class TestResolver: + """ + TODO: These tests were written with a minimal understanding + of what _resolve_setup_path is intending to do. Come up with + more meaningful cases that look like real-world scenarios. + """ + + def test_resolve_setup_path_cwd(self): + assert develop._resolve_setup_path('.', '.', '.') == '.' + + def test_resolve_setup_path_one_dir(self): + assert develop._resolve_setup_path('pkgs', '.', 'pkgs') == '../' + + def test_resolve_setup_path_one_dir_trailing_slash(self): + assert develop._resolve_setup_path('pkgs/', '.', 'pkgs') == '../' + + +class TestNamespaces: + @staticmethod + def install_develop(src_dir, target): + + develop_cmd = [ + sys.executable, + 'setup.py', + 'develop', + '--install-dir', + str(target), + ] + with src_dir.as_cwd(): + with test.test.paths_on_pythonpath([str(target)]): + subprocess.check_call(develop_cmd) + + @pytest.mark.skipif( + bool(os.environ.get("APPVEYOR")), + reason="https://github.com/pypa/setuptools/issues/851", + ) + @pytest.mark.skipif( + platform.python_implementation() == 'PyPy', + reason="https://github.com/pypa/setuptools/issues/1202", + ) + def test_namespace_package_importable(self, tmpdir): + """ + Installing two packages sharing the same namespace, one installed + naturally using pip or `--single-version-externally-managed` + and the other installed using `develop` should leave the namespace + intact and both packages reachable by import. 
+ """ + pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA') + pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB') + target = tmpdir / 'packages' + # use pip to install to the target directory + install_cmd = [ + sys.executable, + '-m', + 'pip', + 'install', + str(pkg_A), + '-t', + str(target), + ] + subprocess.check_call(install_cmd) + self.install_develop(pkg_B, target) + namespaces.make_site_dir(target) + try_import = [ + sys.executable, + '-c', + 'import myns.pkgA; import myns.pkgB', + ] + with test.test.paths_on_pythonpath([str(target)]): + subprocess.check_call(try_import) + + # additionally ensure that pkg_resources import works + pkg_resources_imp = [ + sys.executable, + '-c', + 'import pkg_resources', + ] + with test.test.paths_on_pythonpath([str(target)]): + subprocess.check_call(pkg_resources_imp) + + @staticmethod + def install_workaround(site_packages): + site_packages.mkdir(parents=True) + sc = site_packages / 'sitecustomize.py' + sc.write_text( + textwrap.dedent( + """ + import site + import pathlib + here = pathlib.Path(__file__).parent + site.addsitedir(str(here)) + """ + ).lstrip() + ) + + @pytest.mark.xfail( + platform.python_implementation() == 'PyPy', + reason="Workaround fails on PyPy (why?)", + ) + def test_editable_prefix(self, tmp_path, sample_project): + """ + Editable install to a prefix should be discoverable. + """ + prefix = tmp_path / 'prefix' + prefix.mkdir() + + # figure out where pip will likely install the package + site_packages = prefix / next( + pathlib.Path(path).relative_to(sys.prefix) + for path in sys.path + if 'site-packages' in path and path.startswith(sys.prefix) + ) + + # install the workaround + self.install_workaround(site_packages) + + env = dict(os.environ, PYTHONPATH=str(site_packages)) + cmd = [ + sys.executable, + '-m', + 'pip', + 'install', + '--editable', + str(sample_project), + '--prefix', + str(prefix), + '--no-build-isolation', + ] + subprocess.check_call(cmd, env=env) + + # now run 'sample' with the prefix on the PYTHONPATH + bin = 'Scripts' if platform.system() == 'Windows' else 'bin' + exe = prefix / bin / 'sample' + if sys.version_info < (3, 7) and platform.system() == 'Windows': + exe = str(exe) + subprocess.check_call([exe], env=env) diff --git a/setuptools/tests/test_dist.py b/setuptools/tests/test_dist.py new file mode 100644 index 00000000..c4279f0b --- /dev/null +++ b/setuptools/tests/test_dist.py @@ -0,0 +1,376 @@ +import io +import collections +import re +import functools +import urllib.request +import urllib.parse +from distutils.errors import DistutilsSetupError +from setuptools.dist import ( + _get_unpatched, + check_package_data, + DistDeprecationWarning, + check_specifier, + rfc822_escape, + rfc822_unescape, +) +from setuptools import sic +from setuptools import Distribution + +from .textwrap import DALS +from .test_easy_install import make_nspkg_sdist + +import pytest + + +def test_dist_fetch_build_egg(tmpdir): + """ + Check multiple calls to `Distribution.fetch_build_egg` work as expected. 
+ """ + index = tmpdir.mkdir('index') + index_url = urllib.parse.urljoin( + 'file://', urllib.request.pathname2url(str(index))) + + def sdist_with_index(distname, version): + dist_dir = index.mkdir(distname) + dist_sdist = '%s-%s.tar.gz' % (distname, version) + make_nspkg_sdist(str(dist_dir.join(dist_sdist)), distname, version) + with dist_dir.join('index.html').open('w') as fp: + fp.write(DALS( + ''' + <!DOCTYPE html><html><body> + <a href="{dist_sdist}" rel="internal">{dist_sdist}</a><br/> + </body></html> + ''' + ).format(dist_sdist=dist_sdist)) + sdist_with_index('barbazquux', '3.2.0') + sdist_with_index('barbazquux-runner', '2.11.1') + with tmpdir.join('setup.cfg').open('w') as fp: + fp.write(DALS( + ''' + [easy_install] + index_url = {index_url} + ''' + ).format(index_url=index_url)) + reqs = ''' + barbazquux-runner + barbazquux + '''.split() + with tmpdir.as_cwd(): + dist = Distribution() + dist.parse_config_files() + resolved_dists = [ + dist.fetch_build_egg(r) + for r in reqs + ] + assert [dist.key for dist in resolved_dists if dist] == reqs + + +def test_dist__get_unpatched_deprecated(): + pytest.warns(DistDeprecationWarning, _get_unpatched, [""]) + + +def __read_test_cases(): + base = dict( + name="package", + version="0.0.1", + author="Foo Bar", + author_email="foo@bar.net", + long_description="Long\ndescription", + description="Short description", + keywords=["one", "two"], + ) + + params = functools.partial(dict, base) + + test_cases = [ + ('Metadata version 1.0', params()), + ('Metadata Version 1.0: Short long description', params( + long_description='Short long description', + )), + ('Metadata version 1.1: Classifiers', params( + classifiers=[ + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.7', + 'License :: OSI Approved :: MIT License', + ], + )), + ('Metadata version 1.1: Download URL', params( + download_url='https://example.com', + )), + ('Metadata Version 1.2: Requires-Python', params( + python_requires='>=3.7', + )), + pytest.param( + 'Metadata Version 1.2: Project-Url', + params(project_urls=dict(Foo='https://example.bar')), + marks=pytest.mark.xfail( + reason="Issue #1578: project_urls not read", + ), + ), + ('Metadata Version 2.1: Long Description Content Type', params( + long_description_content_type='text/x-rst; charset=UTF-8', + )), + ('License', params(license='MIT', )), + ('License multiline', params( + license='This is a long license \nover multiple lines', + )), + pytest.param( + 'Metadata Version 2.1: Provides Extra', + params(provides_extras=['foo', 'bar']), + marks=pytest.mark.xfail(reason="provides_extras not read"), + ), + ('Missing author', dict( + name='foo', + version='1.0.0', + author_email='snorri@sturluson.name', + )), + ('Missing author e-mail', dict( + name='foo', + version='1.0.0', + author='Snorri Sturluson', + )), + ('Missing author and e-mail', dict( + name='foo', + version='1.0.0', + )), + ('Bypass normalized version', dict( + name='foo', + version=sic('1.0.0a'), + )), + ] + + return test_cases + + +@pytest.mark.parametrize('name,attrs', __read_test_cases()) +def test_read_metadata(name, attrs): + dist = Distribution(attrs) + metadata_out = dist.metadata + dist_class = metadata_out.__class__ + + # Write to PKG_INFO and then load into a new metadata object + PKG_INFO = io.StringIO() + + metadata_out.write_pkg_file(PKG_INFO) + + PKG_INFO.seek(0) + metadata_in = dist_class() + metadata_in.read_pkg_file(PKG_INFO) + + tested_attrs = [ + ('name', dist_class.get_name), + ('version', dist_class.get_version), + 
('author', dist_class.get_contact), + ('author_email', dist_class.get_contact_email), + ('metadata_version', dist_class.get_metadata_version), + ('provides', dist_class.get_provides), + ('description', dist_class.get_description), + ('long_description', dist_class.get_long_description), + ('download_url', dist_class.get_download_url), + ('keywords', dist_class.get_keywords), + ('platforms', dist_class.get_platforms), + ('obsoletes', dist_class.get_obsoletes), + ('requires', dist_class.get_requires), + ('classifiers', dist_class.get_classifiers), + ('project_urls', lambda s: getattr(s, 'project_urls', {})), + ('provides_extras', lambda s: getattr(s, 'provides_extras', set())), + ] + + for attr, getter in tested_attrs: + assert getter(metadata_in) == getter(metadata_out) + + +def __maintainer_test_cases(): + attrs = {"name": "package", + "version": "1.0", + "description": "xxx"} + + def merge_dicts(d1, d2): + d1 = d1.copy() + d1.update(d2) + + return d1 + + test_cases = [ + ('No author, no maintainer', attrs.copy()), + ('Author (no e-mail), no maintainer', merge_dicts( + attrs, + {'author': 'Author Name'})), + ('Author (e-mail), no maintainer', merge_dicts( + attrs, + {'author': 'Author Name', + 'author_email': 'author@name.com'})), + ('No author, maintainer (no e-mail)', merge_dicts( + attrs, + {'maintainer': 'Maintainer Name'})), + ('No author, maintainer (e-mail)', merge_dicts( + attrs, + {'maintainer': 'Maintainer Name', + 'maintainer_email': 'maintainer@name.com'})), + ('Author (no e-mail), Maintainer (no-email)', merge_dicts( + attrs, + {'author': 'Author Name', + 'maintainer': 'Maintainer Name'})), + ('Author (e-mail), Maintainer (e-mail)', merge_dicts( + attrs, + {'author': 'Author Name', + 'author_email': 'author@name.com', + 'maintainer': 'Maintainer Name', + 'maintainer_email': 'maintainer@name.com'})), + ('No author (e-mail), no maintainer (e-mail)', merge_dicts( + attrs, + {'author_email': 'author@name.com', + 'maintainer_email': 'maintainer@name.com'})), + ('Author unicode', merge_dicts( + attrs, + {'author': '鉄沢寛'})), + ('Maintainer unicode', merge_dicts( + attrs, + {'maintainer': 'Jan Łukasiewicz'})), + ] + + return test_cases + + +@pytest.mark.parametrize('name,attrs', __maintainer_test_cases()) +def test_maintainer_author(name, attrs, tmpdir): + tested_keys = { + 'author': 'Author', + 'author_email': 'Author-email', + 'maintainer': 'Maintainer', + 'maintainer_email': 'Maintainer-email', + } + + # Generate a PKG-INFO file + dist = Distribution(attrs) + fn = tmpdir.mkdir('pkg_info') + fn_s = str(fn) + + dist.metadata.write_pkg_info(fn_s) + + with io.open(str(fn.join('PKG-INFO')), 'r', encoding='utf-8') as f: + raw_pkg_lines = f.readlines() + + # Drop blank lines and strip lines from default description + pkg_lines = list(filter(None, raw_pkg_lines[:-2])) + + pkg_lines_set = set(pkg_lines) + + # Duplicate lines should not be generated + assert len(pkg_lines) == len(pkg_lines_set) + + for fkey, dkey in tested_keys.items(): + val = attrs.get(dkey, None) + if val is None: + for line in pkg_lines: + assert not line.startswith(fkey + ':') + else: + line = '%s: %s' % (fkey, val) + assert line in pkg_lines_set + + +def test_provides_extras_deterministic_order(): + extras = collections.OrderedDict() + extras['a'] = ['foo'] + extras['b'] = ['bar'] + attrs = dict(extras_require=extras) + dist = Distribution(attrs) + assert dist.metadata.provides_extras == ['a', 'b'] + attrs['extras_require'] = collections.OrderedDict( + reversed(list(attrs['extras_require'].items()))) + dist = 
Distribution(attrs) + assert dist.metadata.provides_extras == ['b', 'a'] + + +CHECK_PACKAGE_DATA_TESTS = ( + # Valid. + ({ + '': ['*.txt', '*.rst'], + 'hello': ['*.msg'], + }, None), + # Not a dictionary. + (( + ('', ['*.txt', '*.rst']), + ('hello', ['*.msg']), + ), ( + "'package_data' must be a dictionary mapping package" + " names to lists of string wildcard patterns" + )), + # Invalid key type. + ({ + 400: ['*.txt', '*.rst'], + }, ( + "keys of 'package_data' dict must be strings (got 400)" + )), + # Invalid value type. + ({ + 'hello': str('*.msg'), + }, ( + "\"values of 'package_data' dict\" " + "must be a list of strings (got '*.msg')" + )), + # Invalid value type (generators are single use) + ({ + 'hello': (x for x in "generator"), + }, ( + "\"values of 'package_data' dict\" must be a list of strings " + "(got <generator object" + )), +) + + +@pytest.mark.parametrize( + 'package_data, expected_message', CHECK_PACKAGE_DATA_TESTS) +def test_check_package_data(package_data, expected_message): + if expected_message is None: + assert check_package_data(None, 'package_data', package_data) is None + else: + with pytest.raises( + DistutilsSetupError, match=re.escape(expected_message)): + check_package_data(None, str('package_data'), package_data) + + +def test_check_specifier(): + # valid specifier value + attrs = {'name': 'foo', 'python_requires': '>=3.0, !=3.1'} + dist = Distribution(attrs) + check_specifier(dist, attrs, attrs['python_requires']) + + # invalid specifier value + attrs = {'name': 'foo', 'python_requires': ['>=3.0', '!=3.1']} + with pytest.raises(DistutilsSetupError): + dist = Distribution(attrs) + + +@pytest.mark.parametrize( + 'content, result', + ( + pytest.param( + "Just a single line", + None, + id="single_line", + ), + pytest.param( + "Multiline\nText\nwithout\nextra indents\n", + None, + id="multiline", + ), + pytest.param( + "Multiline\n With\n\nadditional\n indentation", + None, + id="multiline_with_indentation", + ), + pytest.param( + " Leading whitespace", + "Leading whitespace", + id="remove_leading_whitespace", + ), + pytest.param( + " Leading whitespace\nIn\n Multiline comment", + "Leading whitespace\nIn\n Multiline comment", + id="remove_leading_whitespace_multiline", + ), + ) +) +def test_rfc822_unescape(content, result): + assert (result or content) == rfc822_unescape(rfc822_escape(content)) diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py new file mode 100644 index 00000000..29fbd09d --- /dev/null +++ b/setuptools/tests/test_dist_info.py @@ -0,0 +1,74 @@ +"""Test .dist-info style distributions. 
+""" + +import pytest + +import pkg_resources +from .textwrap import DALS + + +class TestDistInfo: + + metadata_base = DALS(""" + Metadata-Version: 1.2 + Requires-Dist: splort (==4) + Provides-Extra: baz + Requires-Dist: quux (>=1.1); extra == 'baz' + """) + + @classmethod + def build_metadata(cls, **kwargs): + lines = ( + '{key}: {value}\n'.format(**locals()) + for key, value in kwargs.items() + ) + return cls.metadata_base + ''.join(lines) + + @pytest.fixture + def metadata(self, tmpdir): + dist_info_name = 'VersionedDistribution-2.718.dist-info' + versioned = tmpdir / dist_info_name + versioned.mkdir() + filename = versioned / 'METADATA' + content = self.build_metadata( + Name='VersionedDistribution', + ) + filename.write_text(content, encoding='utf-8') + + dist_info_name = 'UnversionedDistribution.dist-info' + unversioned = tmpdir / dist_info_name + unversioned.mkdir() + filename = unversioned / 'METADATA' + content = self.build_metadata( + Name='UnversionedDistribution', + Version='0.3', + ) + filename.write_text(content, encoding='utf-8') + + return str(tmpdir) + + def test_distinfo(self, metadata): + dists = dict( + (d.project_name, d) + for d in pkg_resources.find_distributions(metadata) + ) + + assert len(dists) == 2, dists + + unversioned = dists['UnversionedDistribution'] + versioned = dists['VersionedDistribution'] + + assert versioned.version == '2.718' # from filename + assert unversioned.version == '0.3' # from METADATA + + def test_conditional_dependencies(self, metadata): + specs = 'splort==4', 'quux>=1.1' + requires = list(map(pkg_resources.Requirement.parse, specs)) + + for d in pkg_resources.find_distributions(metadata): + assert d.requires() == requires[:1] + assert d.requires(extras=('baz',)) == [ + requires[0], + pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"'), + ] + assert d.extras == ['baz'] diff --git a/setuptools/tests/test_distutils_adoption.py b/setuptools/tests/test_distutils_adoption.py new file mode 100644 index 00000000..b6b9c00e --- /dev/null +++ b/setuptools/tests/test_distutils_adoption.py @@ -0,0 +1,76 @@ +import os +import sys +import functools +import subprocess +import platform + +import pytest +import jaraco.envs +import path + + +IS_PYPY = '__pypy__' in sys.builtin_module_names + + +class VirtualEnv(jaraco.envs.VirtualEnv): + name = '.env' + # Some version of PyPy will import distutils on startup, implicitly + # importing setuptools, and thus leading to BackendInvalid errors + # when upgrading Setuptools. Bypass this behavior by avoiding the + # early availability and need to upgrade. + create_opts = ['--no-setuptools'] + + def run(self, cmd, *args, **kwargs): + cmd = [self.exe(cmd[0])] + cmd[1:] + return subprocess.check_output(cmd, *args, cwd=self.root, **kwargs) + + +@pytest.fixture +def venv(tmp_path, tmp_src): + env = VirtualEnv() + env.root = path.Path(tmp_path / 'venv') + env.req = str(tmp_src) + return env.create() + + +def popen_text(call): + """ + Augment the Popen call with the parameters to ensure unicode text. 
+ """ + return functools.partial(call, universal_newlines=True) \ + if sys.version_info < (3, 7) else functools.partial(call, text=True) + + +def find_distutils(venv, imports='distutils', env=None, **kwargs): + py_cmd = 'import {imports}; print(distutils.__file__)'.format(**locals()) + cmd = ['python', '-c', py_cmd] + if platform.system() == 'Windows': + env['SYSTEMROOT'] = os.environ['SYSTEMROOT'] + return popen_text(venv.run)(cmd, env=env, **kwargs) + + +def test_distutils_stdlib(venv): + """ + Ensure stdlib distutils is used when appropriate. + """ + env = dict(SETUPTOOLS_USE_DISTUTILS='stdlib') + assert venv.name not in find_distutils(venv, env=env).split(os.sep) + + +def test_distutils_local_with_setuptools(venv): + """ + Ensure local distutils is used when appropriate. + """ + env = dict(SETUPTOOLS_USE_DISTUTILS='local') + loc = find_distutils(venv, imports='setuptools, distutils', env=env) + assert venv.name in loc.split(os.sep) + + +@pytest.mark.xfail('IS_PYPY', reason='pypy imports distutils on startup') +def test_distutils_local(venv): + """ + Even without importing, the setuptools-local copy of distutils is + preferred. + """ + env = dict(SETUPTOOLS_USE_DISTUTILS='local') + assert venv.name in find_distutils(venv, env=env).split(os.sep) diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py new file mode 100644 index 00000000..6840d03b --- /dev/null +++ b/setuptools/tests/test_easy_install.py @@ -0,0 +1,1060 @@ +"""Easy install Tests +""" + +import sys +import os +import tempfile +import site +import contextlib +import tarfile +import logging +import itertools +import distutils.errors +import io +import zipfile +import mock +import time +import re +import subprocess +import pathlib + +import pytest +from jaraco import path + +from setuptools import sandbox +from setuptools.sandbox import run_setup +import setuptools.command.easy_install as ei +from setuptools.command.easy_install import ( + EasyInstallDeprecationWarning, ScriptWriter, PthDistributions, + WindowsScriptWriter, +) +from setuptools.dist import Distribution +from pkg_resources import normalize_path, working_set +from pkg_resources import Distribution as PRDistribution +from setuptools.tests.server import MockServer, path_to_url +from setuptools.tests import fail_on_ascii +import pkg_resources + +from . import contexts +from .textwrap import DALS + + +@pytest.fixture(autouse=True) +def pip_disable_index(monkeypatch): + """ + Important: Disable the default index for pip to avoid + querying packages in the index and potentially resolving + and installing packages there. 
+ """ + monkeypatch.setenv('PIP_NO_INDEX', 'true') + + +class FakeDist: + def get_entry_map(self, group): + if group != 'console_scripts': + return {} + return {str('name'): 'ep'} + + def as_requirement(self): + return 'spec' + + +SETUP_PY = DALS(""" + from setuptools import setup + + setup(name='foo') + """) + + +class TestEasyInstallTest: + def test_get_script_args(self): + header = ei.CommandSpec.best().from_environment().as_header() + dist = FakeDist() + args = next(ei.ScriptWriter.get_args(dist)) + name, script = itertools.islice(args, 2) + assert script.startswith(header) + assert "'spec'" in script + assert "'console_scripts'" in script + assert "'name'" in script + assert re.search( + '^# EASY-INSTALL-ENTRY-SCRIPT', script, flags=re.MULTILINE) + + def test_no_find_links(self): + # new option '--no-find-links', that blocks find-links added at + # the project level + dist = Distribution() + cmd = ei.easy_install(dist) + cmd.check_pth_processing = lambda: True + cmd.no_find_links = True + cmd.find_links = ['link1', 'link2'] + cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') + cmd.args = ['ok'] + cmd.ensure_finalized() + assert cmd.package_index.scanned_urls == {} + + # let's try without it (default behavior) + cmd = ei.easy_install(dist) + cmd.check_pth_processing = lambda: True + cmd.find_links = ['link1', 'link2'] + cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') + cmd.args = ['ok'] + cmd.ensure_finalized() + keys = sorted(cmd.package_index.scanned_urls.keys()) + assert keys == ['link1', 'link2'] + + def test_write_exception(self): + """ + Test that `cant_write_to_target` is rendered as a DistutilsError. + """ + dist = Distribution() + cmd = ei.easy_install(dist) + cmd.install_dir = os.getcwd() + with pytest.raises(distutils.errors.DistutilsError): + cmd.cant_write_to_target() + + def test_all_site_dirs(self, monkeypatch): + """ + get_site_dirs should always return site dirs reported by + site.getsitepackages. + """ + path = normalize_path('/setuptools/test/site-packages') + + def mock_gsp(): + return [path] + monkeypatch.setattr(site, 'getsitepackages', mock_gsp, raising=False) + assert path in ei.get_site_dirs() + + def test_all_site_dirs_works_without_getsitepackages(self, monkeypatch): + monkeypatch.delattr(site, 'getsitepackages', raising=False) + assert ei.get_site_dirs() + + @pytest.fixture + def sdist_unicode(self, tmpdir): + files = [ + ( + 'setup.py', + DALS(""" + import setuptools + setuptools.setup( + name="setuptools-test-unicode", + version="1.0", + packages=["mypkg"], + include_package_data=True, + ) + """), + ), + ( + 'mypkg/__init__.py', + "", + ), + ( + 'mypkg/☃.txt', + "", + ), + ] + sdist_name = 'setuptools-test-unicode-1.0.zip' + sdist = tmpdir / sdist_name + # can't use make_sdist, because the issue only occurs + # with zip sdists. + sdist_zip = zipfile.ZipFile(str(sdist), 'w') + for filename, content in files: + sdist_zip.writestr(filename, content) + sdist_zip.close() + return str(sdist) + + @fail_on_ascii + def test_unicode_filename_in_sdist( + self, sdist_unicode, tmpdir, monkeypatch): + """ + The install command should execute correctly even if + the package has unicode filenames. 
+ """ + dist = Distribution({'script_args': ['easy_install']}) + target = (tmpdir / 'target').ensure_dir() + cmd = ei.easy_install( + dist, + install_dir=str(target), + args=['x'], + ) + monkeypatch.setitem(os.environ, 'PYTHONPATH', str(target)) + cmd.ensure_finalized() + cmd.easy_install(sdist_unicode) + + @pytest.fixture + def sdist_unicode_in_script(self, tmpdir): + files = [ + ( + "setup.py", + DALS(""" + import setuptools + setuptools.setup( + name="setuptools-test-unicode", + version="1.0", + packages=["mypkg"], + include_package_data=True, + scripts=['mypkg/unicode_in_script'], + ) + """), + ), + ("mypkg/__init__.py", ""), + ( + "mypkg/unicode_in_script", + DALS( + """ + #!/bin/sh + # á + + non_python_fn() { + } + """), + ), + ] + sdist_name = "setuptools-test-unicode-script-1.0.zip" + sdist = tmpdir / sdist_name + # can't use make_sdist, because the issue only occurs + # with zip sdists. + sdist_zip = zipfile.ZipFile(str(sdist), "w") + for filename, content in files: + sdist_zip.writestr(filename, content.encode('utf-8')) + sdist_zip.close() + return str(sdist) + + @fail_on_ascii + def test_unicode_content_in_sdist( + self, sdist_unicode_in_script, tmpdir, monkeypatch): + """ + The install command should execute correctly even if + the package has unicode in scripts. + """ + dist = Distribution({"script_args": ["easy_install"]}) + target = (tmpdir / "target").ensure_dir() + cmd = ei.easy_install(dist, install_dir=str(target), args=["x"]) + monkeypatch.setitem(os.environ, "PYTHONPATH", str(target)) + cmd.ensure_finalized() + cmd.easy_install(sdist_unicode_in_script) + + @pytest.fixture + def sdist_script(self, tmpdir): + files = [ + ( + 'setup.py', + DALS(""" + import setuptools + setuptools.setup( + name="setuptools-test-script", + version="1.0", + scripts=["mypkg_script"], + ) + """), + ), + ( + 'mypkg_script', + DALS(""" + #/usr/bin/python + print('mypkg_script') + """), + ), + ] + sdist_name = 'setuptools-test-script-1.0.zip' + sdist = str(tmpdir / sdist_name) + make_sdist(sdist, files) + return sdist + + @pytest.mark.skipif(not sys.platform.startswith('linux'), + reason="Test can only be run on Linux") + def test_script_install(self, sdist_script, tmpdir, monkeypatch): + """ + Check scripts are installed. 
+ """ + dist = Distribution({'script_args': ['easy_install']}) + target = (tmpdir / 'target').ensure_dir() + cmd = ei.easy_install( + dist, + install_dir=str(target), + args=['x'], + ) + monkeypatch.setitem(os.environ, 'PYTHONPATH', str(target)) + cmd.ensure_finalized() + cmd.easy_install(sdist_script) + assert (target / 'mypkg_script').exists() + + def test_dist_get_script_args_deprecated(self): + with pytest.warns(EasyInstallDeprecationWarning): + ScriptWriter.get_script_args(None, None) + + def test_dist_get_script_header_deprecated(self): + with pytest.warns(EasyInstallDeprecationWarning): + ScriptWriter.get_script_header("") + + def test_dist_get_writer_deprecated(self): + with pytest.warns(EasyInstallDeprecationWarning): + ScriptWriter.get_writer(None) + + def test_dist_WindowsScriptWriter_get_writer_deprecated(self): + with pytest.warns(EasyInstallDeprecationWarning): + WindowsScriptWriter.get_writer() + + +@pytest.mark.filterwarnings('ignore:Unbuilt egg') +class TestPTHFileWriter: + def test_add_from_cwd_site_sets_dirty(self): + '''a pth file manager should set dirty + if a distribution is in site but also the cwd + ''' + pth = PthDistributions('does-not_exist', [os.getcwd()]) + assert not pth.dirty + pth.add(PRDistribution(os.getcwd())) + assert pth.dirty + + def test_add_from_site_is_ignored(self): + location = '/test/location/does-not-have-to-exist' + # PthDistributions expects all locations to be normalized + location = pkg_resources.normalize_path(location) + pth = PthDistributions('does-not_exist', [location, ]) + assert not pth.dirty + pth.add(PRDistribution(location)) + assert not pth.dirty + + +@pytest.fixture +def setup_context(tmpdir): + with (tmpdir / 'setup.py').open('w') as f: + f.write(SETUP_PY) + with tmpdir.as_cwd(): + yield tmpdir + + +@pytest.mark.usefixtures("user_override") +@pytest.mark.usefixtures("setup_context") +class TestUserInstallTest: + + # prevent check that site-packages is writable. easy_install + # shouldn't be writing to system site-packages during finalize + # options, but while it does, bypass the behavior. 
+ prev_sp_write = mock.patch( + 'setuptools.command.easy_install.easy_install.check_site_dir', + mock.Mock(), + ) + + # simulate setuptools installed in user site packages + @mock.patch('setuptools.command.easy_install.__file__', site.USER_SITE) + @mock.patch('site.ENABLE_USER_SITE', True) + @prev_sp_write + def test_user_install_not_implied_user_site_enabled(self): + self.assert_not_user_site() + + @mock.patch('site.ENABLE_USER_SITE', False) + @prev_sp_write + def test_user_install_not_implied_user_site_disabled(self): + self.assert_not_user_site() + + @staticmethod + def assert_not_user_site(): + # create a finalized easy_install command + dist = Distribution() + dist.script_name = 'setup.py' + cmd = ei.easy_install(dist) + cmd.args = ['py'] + cmd.ensure_finalized() + assert not cmd.user, 'user should not be implied' + + def test_multiproc_atexit(self): + pytest.importorskip('multiprocessing') + + log = logging.getLogger('test_easy_install') + logging.basicConfig(level=logging.INFO, stream=sys.stderr) + log.info('this should not break') + + @pytest.fixture() + def foo_package(self, tmpdir): + egg_file = tmpdir / 'foo-1.0.egg-info' + with egg_file.open('w') as f: + f.write('Name: foo\n') + return str(tmpdir) + + @pytest.fixture() + def install_target(self, tmpdir): + target = str(tmpdir) + with mock.patch('sys.path', sys.path + [target]): + python_path = os.path.pathsep.join(sys.path) + with mock.patch.dict(os.environ, PYTHONPATH=python_path): + yield target + + def test_local_index(self, foo_package, install_target): + """ + The local index must be used when easy_install locates installed + packages. + """ + dist = Distribution() + dist.script_name = 'setup.py' + cmd = ei.easy_install(dist) + cmd.install_dir = install_target + cmd.args = ['foo'] + cmd.ensure_finalized() + cmd.local_index.scan([foo_package]) + res = cmd.easy_install('foo') + actual = os.path.normcase(os.path.realpath(res.location)) + expected = os.path.normcase(os.path.realpath(foo_package)) + assert actual == expected + + @contextlib.contextmanager + def user_install_setup_context(self, *args, **kwargs): + """ + Wrap sandbox.setup_context to patch easy_install in that context to + appear as user-installed. + """ + with self.orig_context(*args, **kwargs): + import setuptools.command.easy_install as ei + ei.__file__ = site.USER_SITE + yield + + def patched_setup_context(self): + self.orig_context = sandbox.setup_context + + return mock.patch( + 'setuptools.sandbox.setup_context', + self.user_install_setup_context, + ) + + +@pytest.fixture +def distutils_package(): + distutils_setup_py = SETUP_PY.replace( + 'from setuptools import setup', + 'from distutils.core import setup', + ) + with contexts.tempdir(cd=os.chdir): + with open('setup.py', 'w') as f: + f.write(distutils_setup_py) + yield + + +@pytest.fixture +def mock_index(): + # set up a server which will simulate an alternate package index. + p_index = MockServer() + if p_index.server_port == 0: + # Some platforms (Jython) don't find a port to which to bind, + # so skip test for them. 
+ pytest.skip("could not find a valid port") + p_index.start() + return p_index + + +class TestDistutilsPackage: + def test_bdist_egg_available_on_distutils_pkg(self, distutils_package): + run_setup('setup.py', ['bdist_egg']) + + +class TestSetupRequires: + + def test_setup_requires_honors_fetch_params(self, mock_index, monkeypatch): + """ + When easy_install installs a source distribution which specifies + setup_requires, it should honor the fetch parameters (such as + index-url, and find-links). + """ + monkeypatch.setenv(str('PIP_RETRIES'), str('0')) + monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) + monkeypatch.setenv('PIP_NO_INDEX', 'false') + with contexts.quiet(): + # create an sdist that has a build-time dependency. + with TestSetupRequires.create_sdist() as dist_file: + with contexts.tempdir() as temp_install_dir: + with contexts.environment(PYTHONPATH=temp_install_dir): + cmd = [ + sys.executable, + '-m', 'setup', + 'easy_install', + '--index-url', mock_index.url, + '--exclude-scripts', + '--install-dir', temp_install_dir, + dist_file, + ] + subprocess.Popen(cmd).wait() + # there should have been one requests to the server + assert [r.path for r in mock_index.requests] == ['/does-not-exist/'] + + @staticmethod + @contextlib.contextmanager + def create_sdist(): + """ + Return an sdist with a setup_requires dependency (of something that + doesn't exist) + """ + with contexts.tempdir() as dir: + dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz') + make_sdist(dist_path, [ + ('setup.py', DALS(""" + import setuptools + setuptools.setup( + name="setuptools-test-fetcher", + version="1.0", + setup_requires = ['does-not-exist'], + ) + """)), + ('setup.cfg', ''), + ]) + yield dist_path + + use_setup_cfg = ( + (), + ('dependency_links',), + ('setup_requires',), + ('dependency_links', 'setup_requires'), + ) + + @pytest.mark.parametrize('use_setup_cfg', use_setup_cfg) + def test_setup_requires_overrides_version_conflict(self, use_setup_cfg): + """ + Regression test for distribution issue 323: + https://bitbucket.org/tarek/distribute/issues/323 + + Ensures that a distribution's setup_requires requirements can still be + installed and used locally even if a conflicting version of that + requirement is already on the path. + """ + + fake_dist = PRDistribution('does-not-matter', project_name='foobar', + version='0.0') + working_set.add(fake_dist) + + with contexts.save_pkg_resources_state(): + with contexts.tempdir() as temp_dir: + test_pkg = create_setup_requires_package( + temp_dir, use_setup_cfg=use_setup_cfg) + test_setup_py = os.path.join(test_pkg, 'setup.py') + with contexts.quiet() as (stdout, stderr): + # Don't even need to install the package, just + # running the setup.py at all is sufficient + run_setup(test_setup_py, [str('--name')]) + + lines = stdout.readlines() + assert len(lines) > 0 + assert lines[-1].strip() == 'test_pkg' + + @pytest.mark.parametrize('use_setup_cfg', use_setup_cfg) + def test_setup_requires_override_nspkg(self, use_setup_cfg): + """ + Like ``test_setup_requires_overrides_version_conflict`` but where the + ``setup_requires`` package is part of a namespace package that has + *already* been imported. 
+ """ + + with contexts.save_pkg_resources_state(): + with contexts.tempdir() as temp_dir: + foobar_1_archive = os.path.join(temp_dir, 'foo.bar-0.1.tar.gz') + make_nspkg_sdist(foobar_1_archive, 'foo.bar', '0.1') + # Now actually go ahead an extract to the temp dir and add the + # extracted path to sys.path so foo.bar v0.1 is importable + foobar_1_dir = os.path.join(temp_dir, 'foo.bar-0.1') + os.mkdir(foobar_1_dir) + with tarfile.open(foobar_1_archive) as tf: + tf.extractall(foobar_1_dir) + sys.path.insert(1, foobar_1_dir) + + dist = PRDistribution(foobar_1_dir, project_name='foo.bar', + version='0.1') + working_set.add(dist) + + template = DALS("""\ + import foo # Even with foo imported first the + # setup_requires package should override + import setuptools + setuptools.setup(**%r) + + if not (hasattr(foo, '__path__') and + len(foo.__path__) == 2): + print('FAIL') + + if 'foo.bar-0.2' not in foo.__path__[0]: + print('FAIL') + """) + + test_pkg = create_setup_requires_package( + temp_dir, 'foo.bar', '0.2', make_nspkg_sdist, template, + use_setup_cfg=use_setup_cfg) + + test_setup_py = os.path.join(test_pkg, 'setup.py') + + with contexts.quiet() as (stdout, stderr): + try: + # Don't even need to install the package, just + # running the setup.py at all is sufficient + run_setup(test_setup_py, [str('--name')]) + except pkg_resources.VersionConflict: + self.fail( + 'Installing setup.py requirements ' + 'caused a VersionConflict') + + assert 'FAIL' not in stdout.getvalue() + lines = stdout.readlines() + assert len(lines) > 0 + assert lines[-1].strip() == 'test_pkg' + + @pytest.mark.parametrize('use_setup_cfg', use_setup_cfg) + def test_setup_requires_with_attr_version(self, use_setup_cfg): + def make_dependency_sdist(dist_path, distname, version): + files = [( + 'setup.py', + DALS(""" + import setuptools + setuptools.setup( + name={name!r}, + version={version!r}, + py_modules=[{name!r}], + ) + """.format(name=distname, version=version)), + ), ( + distname + '.py', + DALS(""" + version = 42 + """), + )] + make_sdist(dist_path, files) + with contexts.save_pkg_resources_state(): + with contexts.tempdir() as temp_dir: + test_pkg = create_setup_requires_package( + temp_dir, setup_attrs=dict(version='attr: foobar.version'), + make_package=make_dependency_sdist, + use_setup_cfg=use_setup_cfg + ('version',), + ) + test_setup_py = os.path.join(test_pkg, 'setup.py') + with contexts.quiet() as (stdout, stderr): + run_setup(test_setup_py, [str('--version')]) + lines = stdout.readlines() + assert len(lines) > 0 + assert lines[-1].strip() == '42' + + def test_setup_requires_honors_pip_env(self, mock_index, monkeypatch): + monkeypatch.setenv(str('PIP_RETRIES'), str('0')) + monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) + monkeypatch.setenv('PIP_NO_INDEX', 'false') + monkeypatch.setenv(str('PIP_INDEX_URL'), mock_index.url) + with contexts.save_pkg_resources_state(): + with contexts.tempdir() as temp_dir: + test_pkg = create_setup_requires_package( + temp_dir, 'python-xlib', '0.19', + setup_attrs=dict(dependency_links=[])) + test_setup_cfg = os.path.join(test_pkg, 'setup.cfg') + with open(test_setup_cfg, 'w') as fp: + fp.write(DALS( + ''' + [easy_install] + index_url = https://pypi.org/legacy/ + ''')) + test_setup_py = os.path.join(test_pkg, 'setup.py') + with pytest.raises(distutils.errors.DistutilsError): + run_setup(test_setup_py, [str('--version')]) + assert len(mock_index.requests) == 1 + assert mock_index.requests[0].path == '/python-xlib/' + + def test_setup_requires_with_pep508_url(self, mock_index, 
monkeypatch): + monkeypatch.setenv(str('PIP_RETRIES'), str('0')) + monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) + monkeypatch.setenv(str('PIP_INDEX_URL'), mock_index.url) + with contexts.save_pkg_resources_state(): + with contexts.tempdir() as temp_dir: + dep_sdist = os.path.join(temp_dir, 'dep.tar.gz') + make_trivial_sdist(dep_sdist, 'dependency', '42') + dep_url = path_to_url(dep_sdist, authority='localhost') + test_pkg = create_setup_requires_package( + temp_dir, + # Ignored (overridden by setup_attrs) + 'python-xlib', '0.19', + setup_attrs=dict( + setup_requires='dependency @ %s' % dep_url)) + test_setup_py = os.path.join(test_pkg, 'setup.py') + run_setup(test_setup_py, [str('--version')]) + assert len(mock_index.requests) == 0 + + def test_setup_requires_with_allow_hosts(self, mock_index): + ''' The `allow-hosts` option in not supported anymore. ''' + files = { + 'test_pkg': { + 'setup.py': DALS(''' + from setuptools import setup + setup(setup_requires='python-xlib') + '''), + 'setup.cfg': DALS(''' + [easy_install] + allow_hosts = * + '''), + } + } + with contexts.save_pkg_resources_state(): + with contexts.tempdir() as temp_dir: + path.build(files, prefix=temp_dir) + setup_py = str(pathlib.Path(temp_dir, 'test_pkg', 'setup.py')) + with pytest.raises(distutils.errors.DistutilsError): + run_setup(setup_py, [str('--version')]) + assert len(mock_index.requests) == 0 + + def test_setup_requires_with_python_requires(self, monkeypatch, tmpdir): + ''' Check `python_requires` is honored. ''' + monkeypatch.setenv(str('PIP_RETRIES'), str('0')) + monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) + monkeypatch.setenv(str('PIP_NO_INDEX'), str('1')) + monkeypatch.setenv(str('PIP_VERBOSE'), str('1')) + dep_1_0_sdist = 'dep-1.0.tar.gz' + dep_1_0_url = path_to_url(str(tmpdir / dep_1_0_sdist)) + dep_1_0_python_requires = '>=2.7' + make_python_requires_sdist( + str(tmpdir / dep_1_0_sdist), 'dep', '1.0', dep_1_0_python_requires) + dep_2_0_sdist = 'dep-2.0.tar.gz' + dep_2_0_url = path_to_url(str(tmpdir / dep_2_0_sdist)) + dep_2_0_python_requires = '!=' + '.'.join( + map(str, sys.version_info[:2])) + '.*' + make_python_requires_sdist( + str(tmpdir / dep_2_0_sdist), 'dep', '2.0', dep_2_0_python_requires) + index = tmpdir / 'index.html' + index.write_text(DALS( + ''' + <!DOCTYPE html> + <html><head><title>Links for dep</title></head> + <body> + <h1>Links for dep</h1> + <a href="{dep_1_0_url}" data-requires-python="{dep_1_0_python_requires}">{dep_1_0_sdist}</a><br/> + <a href="{dep_2_0_url}" data-requires-python="{dep_2_0_python_requires}">{dep_2_0_sdist}</a><br/> + </body> + </html> + ''').format( # noqa + dep_1_0_url=dep_1_0_url, + dep_1_0_sdist=dep_1_0_sdist, + dep_1_0_python_requires=dep_1_0_python_requires, + dep_2_0_url=dep_2_0_url, + dep_2_0_sdist=dep_2_0_sdist, + dep_2_0_python_requires=dep_2_0_python_requires, + ), 'utf-8') + index_url = path_to_url(str(index)) + with contexts.save_pkg_resources_state(): + test_pkg = create_setup_requires_package( + str(tmpdir), + 'python-xlib', '0.19', # Ignored (overridden by setup_attrs). 
+ setup_attrs=dict( + setup_requires='dep', dependency_links=[index_url])) + test_setup_py = os.path.join(test_pkg, 'setup.py') + run_setup(test_setup_py, [str('--version')]) + eggs = list(map(str, pkg_resources.find_distributions( + os.path.join(test_pkg, '.eggs')))) + assert eggs == ['dep 1.0'] + + @pytest.mark.parametrize( + 'with_dependency_links_in_setup_py', + (False, True)) + def test_setup_requires_with_find_links_in_setup_cfg( + self, monkeypatch, + with_dependency_links_in_setup_py): + monkeypatch.setenv(str('PIP_RETRIES'), str('0')) + monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) + with contexts.save_pkg_resources_state(): + with contexts.tempdir() as temp_dir: + make_trivial_sdist( + os.path.join(temp_dir, 'python-xlib-42.tar.gz'), + 'python-xlib', + '42') + test_pkg = os.path.join(temp_dir, 'test_pkg') + test_setup_py = os.path.join(test_pkg, 'setup.py') + test_setup_cfg = os.path.join(test_pkg, 'setup.cfg') + os.mkdir(test_pkg) + with open(test_setup_py, 'w') as fp: + if with_dependency_links_in_setup_py: + dependency_links = [os.path.join(temp_dir, 'links')] + else: + dependency_links = [] + fp.write(DALS( + ''' + from setuptools import installer, setup + setup(setup_requires='python-xlib==42', + dependency_links={dependency_links!r}) + ''').format( + dependency_links=dependency_links)) + with open(test_setup_cfg, 'w') as fp: + fp.write(DALS( + ''' + [easy_install] + index_url = {index_url} + find_links = {find_links} + ''').format(index_url=os.path.join(temp_dir, 'index'), + find_links=temp_dir)) + run_setup(test_setup_py, [str('--version')]) + + def test_setup_requires_with_transitive_extra_dependency( + self, monkeypatch): + # Use case: installing a package with a build dependency on + # an already installed `dep[extra]`, which in turn depends + # on `extra_dep` (whose is not already installed). + with contexts.save_pkg_resources_state(): + with contexts.tempdir() as temp_dir: + # Create source distribution for `extra_dep`. + make_trivial_sdist( + os.path.join(temp_dir, 'extra_dep-1.0.tar.gz'), + 'extra_dep', '1.0') + # Create source tree for `dep`. + dep_pkg = os.path.join(temp_dir, 'dep') + os.mkdir(dep_pkg) + path.build({ + 'setup.py': + DALS(""" + import setuptools + setuptools.setup( + name='dep', version='2.0', + extras_require={'extra': ['extra_dep']}, + ) + """), + 'setup.cfg': '', + }, prefix=dep_pkg) + # "Install" dep. + run_setup( + os.path.join(dep_pkg, 'setup.py'), [str('dist_info')]) + working_set.add_entry(dep_pkg) + # Create source tree for test package. + test_pkg = os.path.join(temp_dir, 'test_pkg') + test_setup_py = os.path.join(test_pkg, 'setup.py') + os.mkdir(test_pkg) + with open(test_setup_py, 'w') as fp: + fp.write(DALS( + ''' + from setuptools import installer, setup + setup(setup_requires='dep[extra]') + ''')) + # Check... + monkeypatch.setenv(str('PIP_FIND_LINKS'), str(temp_dir)) + monkeypatch.setenv(str('PIP_NO_INDEX'), str('1')) + monkeypatch.setenv(str('PIP_RETRIES'), str('0')) + monkeypatch.setenv(str('PIP_TIMEOUT'), str('0')) + run_setup(test_setup_py, [str('--version')]) + + +def make_trivial_sdist(dist_path, distname, version): + """ + Create a simple sdist tarball at dist_path, containing just a simple + setup.py. 
+ """ + + make_sdist(dist_path, [ + ('setup.py', + DALS("""\ + import setuptools + setuptools.setup( + name=%r, + version=%r + ) + """ % (distname, version))), + ('setup.cfg', ''), + ]) + + +def make_nspkg_sdist(dist_path, distname, version): + """ + Make an sdist tarball with distname and version which also contains one + package with the same name as distname. The top-level package is + designated a namespace package). + """ + + parts = distname.split('.') + nspackage = parts[0] + + packages = ['.'.join(parts[:idx]) for idx in range(1, len(parts) + 1)] + + setup_py = DALS("""\ + import setuptools + setuptools.setup( + name=%r, + version=%r, + packages=%r, + namespace_packages=[%r] + ) + """ % (distname, version, packages, nspackage)) + + init = "__import__('pkg_resources').declare_namespace(__name__)" + + files = [('setup.py', setup_py), + (os.path.join(nspackage, '__init__.py'), init)] + for package in packages[1:]: + filename = os.path.join(*(package.split('.') + ['__init__.py'])) + files.append((filename, '')) + + make_sdist(dist_path, files) + + +def make_python_requires_sdist(dist_path, distname, version, python_requires): + make_sdist(dist_path, [ + ( + 'setup.py', + DALS("""\ + import setuptools + setuptools.setup( + name={name!r}, + version={version!r}, + python_requires={python_requires!r}, + ) + """).format( + name=distname, version=version, + python_requires=python_requires)), + ('setup.cfg', ''), + ]) + + +def make_sdist(dist_path, files): + """ + Create a simple sdist tarball at dist_path, containing the files + listed in ``files`` as ``(filename, content)`` tuples. + """ + + # Distributions with only one file don't play well with pip. + assert len(files) > 1 + with tarfile.open(dist_path, 'w:gz') as dist: + for filename, content in files: + file_bytes = io.BytesIO(content.encode('utf-8')) + file_info = tarfile.TarInfo(name=filename) + file_info.size = len(file_bytes.getvalue()) + file_info.mtime = int(time.time()) + dist.addfile(file_info, fileobj=file_bytes) + + +def create_setup_requires_package(path, distname='foobar', version='0.1', + make_package=make_trivial_sdist, + setup_py_template=None, setup_attrs={}, + use_setup_cfg=()): + """Creates a source tree under path for a trivial test package that has a + single requirement in setup_requires--a tarball for that requirement is + also created and added to the dependency_links argument. + + ``distname`` and ``version`` refer to the name/version of the package that + the test package requires via ``setup_requires``. The name of the test + package itself is just 'test_pkg'. 
+ """ + + test_setup_attrs = { + 'name': 'test_pkg', 'version': '0.0', + 'setup_requires': ['%s==%s' % (distname, version)], + 'dependency_links': [os.path.abspath(path)] + } + test_setup_attrs.update(setup_attrs) + + test_pkg = os.path.join(path, 'test_pkg') + os.mkdir(test_pkg) + + # setup.cfg + if use_setup_cfg: + options = [] + metadata = [] + for name in use_setup_cfg: + value = test_setup_attrs.pop(name) + if name in 'name version'.split(): + section = metadata + else: + section = options + if isinstance(value, (tuple, list)): + value = ';'.join(value) + section.append('%s: %s' % (name, value)) + test_setup_cfg_contents = DALS( + """ + [metadata] + {metadata} + [options] + {options} + """ + ).format( + options='\n'.join(options), + metadata='\n'.join(metadata), + ) + else: + test_setup_cfg_contents = '' + with open(os.path.join(test_pkg, 'setup.cfg'), 'w') as f: + f.write(test_setup_cfg_contents) + + # setup.py + if setup_py_template is None: + setup_py_template = DALS("""\ + import setuptools + setuptools.setup(**%r) + """) + with open(os.path.join(test_pkg, 'setup.py'), 'w') as f: + f.write(setup_py_template % test_setup_attrs) + + foobar_path = os.path.join(path, '%s-%s.tar.gz' % (distname, version)) + make_package(foobar_path, distname, version) + + return test_pkg + + +@pytest.mark.skipif( + sys.platform.startswith('java') and ei.is_sh(sys.executable), + reason="Test cannot run under java when executable is sh" +) +class TestScriptHeader: + non_ascii_exe = '/Users/José/bin/python' + exe_with_spaces = r'C:\Program Files\Python36\python.exe' + + def test_get_script_header(self): + expected = '#!%s\n' % ei.nt_quote_arg(os.path.normpath(sys.executable)) + actual = ei.ScriptWriter.get_header('#!/usr/local/bin/python') + assert actual == expected + + def test_get_script_header_args(self): + expected = '#!%s -x\n' % ei.nt_quote_arg( + os.path.normpath(sys.executable)) + actual = ei.ScriptWriter.get_header('#!/usr/bin/python -x') + assert actual == expected + + def test_get_script_header_non_ascii_exe(self): + actual = ei.ScriptWriter.get_header( + '#!/usr/bin/python', + executable=self.non_ascii_exe) + expected = str('#!%s -x\n') % self.non_ascii_exe + assert actual == expected + + def test_get_script_header_exe_with_spaces(self): + actual = ei.ScriptWriter.get_header( + '#!/usr/bin/python', + executable='"' + self.exe_with_spaces + '"') + expected = '#!"%s"\n' % self.exe_with_spaces + assert actual == expected + + +class TestCommandSpec: + def test_custom_launch_command(self): + """ + Show how a custom CommandSpec could be used to specify a #! executable + which takes parameters. + """ + cmd = ei.CommandSpec(['/usr/bin/env', 'python3']) + assert cmd.as_header() == '#!/usr/bin/env python3\n' + + def test_from_param_for_CommandSpec_is_passthrough(self): + """ + from_param should return an instance of a CommandSpec + """ + cmd = ei.CommandSpec(['python']) + cmd_new = ei.CommandSpec.from_param(cmd) + assert cmd is cmd_new + + @mock.patch('sys.executable', TestScriptHeader.exe_with_spaces) + @mock.patch.dict(os.environ) + def test_from_environment_with_spaces_in_executable(self): + os.environ.pop('__PYVENV_LAUNCHER__', None) + cmd = ei.CommandSpec.from_environment() + assert len(cmd) == 1 + assert cmd.as_header().startswith('#!"') + + def test_from_simple_string_uses_shlex(self): + """ + In order to support `executable = /usr/bin/env my-python`, make sure + from_param invokes shlex on that input. 
+ """ + cmd = ei.CommandSpec.from_param('/usr/bin/env my-python') + assert len(cmd) == 2 + assert '"' not in cmd.as_header() + + +class TestWindowsScriptWriter: + def test_header(self): + hdr = ei.WindowsScriptWriter.get_header('') + assert hdr.startswith('#!') + assert hdr.endswith('\n') + hdr = hdr.lstrip('#!') + hdr = hdr.rstrip('\n') + # header should not start with an escaped quote + assert not hdr.startswith('\\"') diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py new file mode 100644 index 00000000..ee07b5a1 --- /dev/null +++ b/setuptools/tests/test_egg_info.py @@ -0,0 +1,1086 @@ +import sys +import ast +import os +import glob +import re +import stat +import time +from typing import List, Tuple + +import pytest +from jaraco import path + +from setuptools.command.egg_info import ( + egg_info, manifest_maker, EggInfoDeprecationWarning, get_pkg_info_revision, +) +from setuptools.dist import Distribution + +from . import environment +from .textwrap import DALS +from . import contexts + + +class Environment(str): + pass + + +class TestEggInfo: + + setup_script = DALS(""" + from setuptools import setup + + setup( + name='foo', + py_modules=['hello'], + entry_points={'console_scripts': ['hi = hello.run']}, + zip_safe=False, + ) + """) + + def _create_project(self): + path.build({ + 'setup.py': self.setup_script, + 'hello.py': DALS(""" + def run(): + print('hello') + """) + }) + + @staticmethod + def _extract_mv_version(pkg_info_lines: List[str]) -> Tuple[int, int]: + version_str = pkg_info_lines[0].split(' ')[1] + return tuple(map(int, version_str.split('.')[:2])) + + @pytest.fixture + def env(self): + with contexts.tempdir(prefix='setuptools-test.') as env_dir: + env = Environment(env_dir) + os.chmod(env_dir, stat.S_IRWXU) + subs = 'home', 'lib', 'scripts', 'data', 'egg-base' + env.paths = dict( + (dirname, os.path.join(env_dir, dirname)) + for dirname in subs + ) + list(map(os.mkdir, env.paths.values())) + path.build({ + env.paths['home']: { + '.pydistutils.cfg': DALS(""" + [egg_info] + egg-base = %(egg-base)s + """ % env.paths) + } + }) + yield env + + def test_egg_info_save_version_info_setup_empty(self, tmpdir_cwd, env): + """ + When the egg_info section is empty or not present, running + save_version_info should add the settings to the setup.cfg + in a deterministic order. + """ + setup_cfg = os.path.join(env.paths['home'], 'setup.cfg') + dist = Distribution() + ei = egg_info(dist) + ei.initialize_options() + ei.save_version_info(setup_cfg) + + with open(setup_cfg, 'r') as f: + content = f.read() + + assert '[egg_info]' in content + assert 'tag_build =' in content + assert 'tag_date = 0' in content + + expected_order = 'tag_build', 'tag_date', + + self._validate_content_order(content, expected_order) + + @staticmethod + def _validate_content_order(content, expected): + """ + Assert that the strings in expected appear in content + in order. + """ + pattern = '.*'.join(expected) + flags = re.MULTILINE | re.DOTALL + assert re.search(pattern, content, flags) + + def test_egg_info_save_version_info_setup_defaults(self, tmpdir_cwd, env): + """ + When running save_version_info on an existing setup.cfg + with the 'default' values present from a previous run, + the file should remain unchanged. 
+ """ + setup_cfg = os.path.join(env.paths['home'], 'setup.cfg') + path.build({ + setup_cfg: DALS(""" + [egg_info] + tag_build = + tag_date = 0 + """), + }) + dist = Distribution() + ei = egg_info(dist) + ei.initialize_options() + ei.save_version_info(setup_cfg) + + with open(setup_cfg, 'r') as f: + content = f.read() + + assert '[egg_info]' in content + assert 'tag_build =' in content + assert 'tag_date = 0' in content + + expected_order = 'tag_build', 'tag_date', + + self._validate_content_order(content, expected_order) + + def test_expected_files_produced(self, tmpdir_cwd, env): + self._create_project() + + self._run_egg_info_command(tmpdir_cwd, env) + actual = os.listdir('foo.egg-info') + + expected = [ + 'PKG-INFO', + 'SOURCES.txt', + 'dependency_links.txt', + 'entry_points.txt', + 'not-zip-safe', + 'top_level.txt', + ] + assert sorted(actual) == expected + + def test_license_is_a_string(self, tmpdir_cwd, env): + setup_config = DALS(""" + [metadata] + name=foo + version=0.0.1 + license=file:MIT + """) + + setup_script = DALS(""" + from setuptools import setup + + setup() + """) + + path.build({ + 'setup.py': setup_script, + 'setup.cfg': setup_config, + }) + + # This command should fail with a ValueError, but because it's + # currently configured to use a subprocess, the actual traceback + # object is lost and we need to parse it from stderr + with pytest.raises(AssertionError) as exc: + self._run_egg_info_command(tmpdir_cwd, env) + + # Hopefully this is not too fragile: the only argument to the + # assertion error should be a traceback, ending with: + # ValueError: .... + # + # assert not 1 + tb = exc.value.args[0].split('\n') + assert tb[-3].lstrip().startswith('ValueError') + + def test_rebuilt(self, tmpdir_cwd, env): + """Ensure timestamps are updated when the command is re-run.""" + self._create_project() + + self._run_egg_info_command(tmpdir_cwd, env) + timestamp_a = os.path.getmtime('foo.egg-info') + + # arbitrary sleep just to handle *really* fast systems + time.sleep(.001) + + self._run_egg_info_command(tmpdir_cwd, env) + timestamp_b = os.path.getmtime('foo.egg-info') + + assert timestamp_a != timestamp_b + + def test_manifest_template_is_read(self, tmpdir_cwd, env): + self._create_project() + path.build({ + 'MANIFEST.in': DALS(""" + recursive-include docs *.rst + """), + 'docs': { + 'usage.rst': "Run 'hi'", + } + }) + self._run_egg_info_command(tmpdir_cwd, env) + egg_info_dir = os.path.join('.', 'foo.egg-info') + sources_txt = os.path.join(egg_info_dir, 'SOURCES.txt') + with open(sources_txt) as f: + assert 'docs/usage.rst' in f.read().split('\n') + + def _setup_script_with_requires(self, requires, use_setup_cfg=False): + setup_script = DALS( + ''' + from setuptools import setup + + setup(name='foo', zip_safe=False, %s) + ''' + ) % ('' if use_setup_cfg else requires) + setup_config = requires if use_setup_cfg else '' + path.build({ + 'setup.py': setup_script, + 'setup.cfg': setup_config, + }) + + mismatch_marker = "python_version<'{this_ver}'".format( + this_ver=sys.version_info[0], + ) + # Alternate equivalent syntax. 
+ mismatch_marker_alternate = 'python_version < "{this_ver}"'.format( + this_ver=sys.version_info[0], + ) + invalid_marker = "<=>++" + + class RequiresTestHelper: + + @staticmethod + def parametrize(*test_list, **format_dict): + idlist = [] + argvalues = [] + for test in test_list: + test_params = test.lstrip().split('\n\n', 3) + name_kwargs = test_params.pop(0).split('\n') + if len(name_kwargs) > 1: + val = name_kwargs[1].strip() + install_cmd_kwargs = ast.literal_eval(val) + else: + install_cmd_kwargs = {} + name = name_kwargs[0].strip() + setup_py_requires, setup_cfg_requires, expected_requires = ( + DALS(a).format(**format_dict) for a in test_params + ) + for id_, requires, use_cfg in ( + (name, setup_py_requires, False), + (name + '_in_setup_cfg', setup_cfg_requires, True), + ): + idlist.append(id_) + marks = () + if requires.startswith('@xfail\n'): + requires = requires[7:] + marks = pytest.mark.xfail + argvalues.append(pytest.param(requires, use_cfg, + expected_requires, + install_cmd_kwargs, + marks=marks)) + return pytest.mark.parametrize( + 'requires,use_setup_cfg,' + 'expected_requires,install_cmd_kwargs', + argvalues, ids=idlist, + ) + + @RequiresTestHelper.parametrize( + # Format of a test: + # + # id + # install_cmd_kwargs [optional] + # + # requires block (when used in setup.py) + # + # requires block (when used in setup.cfg) + # + # expected contents of requires.txt + + ''' + install_requires_deterministic + + install_requires=["wheel>=0.5", "pytest"] + + [options] + install_requires = + wheel>=0.5 + pytest + + wheel>=0.5 + pytest + ''', + + ''' + install_requires_ordered + + install_requires=["pytest>=3.0.2,!=10.9999"] + + [options] + install_requires = + pytest>=3.0.2,!=10.9999 + + pytest!=10.9999,>=3.0.2 + ''', + + ''' + install_requires_with_marker + + install_requires=["barbazquux;{mismatch_marker}"], + + [options] + install_requires = + barbazquux; {mismatch_marker} + + [:{mismatch_marker_alternate}] + barbazquux + ''', + + ''' + install_requires_with_extra + {'cmd': ['egg_info']} + + install_requires=["barbazquux [test]"], + + [options] + install_requires = + barbazquux [test] + + barbazquux[test] + ''', + + ''' + install_requires_with_extra_and_marker + + install_requires=["barbazquux [test]; {mismatch_marker}"], + + [options] + install_requires = + barbazquux [test]; {mismatch_marker} + + [:{mismatch_marker_alternate}] + barbazquux[test] + ''', + + ''' + setup_requires_with_markers + + setup_requires=["barbazquux;{mismatch_marker}"], + + [options] + setup_requires = + barbazquux; {mismatch_marker} + + ''', + + ''' + tests_require_with_markers + {'cmd': ['test'], 'output': "Ran 0 tests in"} + + tests_require=["barbazquux;{mismatch_marker}"], + + [options] + tests_require = + barbazquux; {mismatch_marker} + + ''', + + ''' + extras_require_with_extra + {'cmd': ['egg_info']} + + extras_require={{"extra": ["barbazquux [test]"]}}, + + [options.extras_require] + extra = barbazquux [test] + + [extra] + barbazquux[test] + ''', + + ''' + extras_require_with_extra_and_marker_in_req + + extras_require={{"extra": ["barbazquux [test]; {mismatch_marker}"]}}, + + [options.extras_require] + extra = + barbazquux [test]; {mismatch_marker} + + [extra] + + [extra:{mismatch_marker_alternate}] + barbazquux[test] + ''', + + # FIXME: ConfigParser does not allow : in key names! 
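+ # A key such as :python_version<'3' cannot be expressed in setup.cfg,
+ # since ConfigParser also treats ':' as a key/value delimiter; the
+ # setup.cfg variant of the next case is therefore expected to fail
+ # (@xfail).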
+ ''' + extras_require_with_marker + + extras_require={{":{mismatch_marker}": ["barbazquux"]}}, + + @xfail + [options.extras_require] + :{mismatch_marker} = barbazquux + + [:{mismatch_marker}] + barbazquux + ''', + + ''' + extras_require_with_marker_in_req + + extras_require={{"extra": ["barbazquux; {mismatch_marker}"]}}, + + [options.extras_require] + extra = + barbazquux; {mismatch_marker} + + [extra] + + [extra:{mismatch_marker_alternate}] + barbazquux + ''', + + ''' + extras_require_with_empty_section + + extras_require={{"empty": []}}, + + [options.extras_require] + empty = + + [empty] + ''', + # Format arguments. + invalid_marker=invalid_marker, + mismatch_marker=mismatch_marker, + mismatch_marker_alternate=mismatch_marker_alternate, + ) + def test_requires( + self, tmpdir_cwd, env, requires, use_setup_cfg, + expected_requires, install_cmd_kwargs): + self._setup_script_with_requires(requires, use_setup_cfg) + self._run_egg_info_command(tmpdir_cwd, env, **install_cmd_kwargs) + egg_info_dir = os.path.join('.', 'foo.egg-info') + requires_txt = os.path.join(egg_info_dir, 'requires.txt') + if os.path.exists(requires_txt): + with open(requires_txt) as fp: + install_requires = fp.read() + else: + install_requires = '' + assert install_requires.lstrip() == expected_requires + assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == [] + + def test_install_requires_unordered_disallowed(self, tmpdir_cwd, env): + """ + Packages that pass unordered install_requires sequences + should be rejected as they produce non-deterministic + builds. See #458. + """ + req = 'install_requires={"fake-factory==0.5.2", "pytz"}' + self._setup_script_with_requires(req) + with pytest.raises(AssertionError): + self._run_egg_info_command(tmpdir_cwd, env) + + def test_extras_require_with_invalid_marker(self, tmpdir_cwd, env): + tmpl = 'extras_require={{":{marker}": ["barbazquux"]}},' + req = tmpl.format(marker=self.invalid_marker) + self._setup_script_with_requires(req) + with pytest.raises(AssertionError): + self._run_egg_info_command(tmpdir_cwd, env) + assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == [] + + def test_extras_require_with_invalid_marker_in_req(self, tmpdir_cwd, env): + tmpl = 'extras_require={{"extra": ["barbazquux; {marker}"]}},' + req = tmpl.format(marker=self.invalid_marker) + self._setup_script_with_requires(req) + with pytest.raises(AssertionError): + self._run_egg_info_command(tmpdir_cwd, env) + assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == [] + + def test_provides_extra(self, tmpdir_cwd, env): + self._setup_script_with_requires( + 'extras_require={"foobar": ["barbazquux"]},') + environ = os.environ.copy().update( + HOME=env.paths['home'], + ) + code, data = environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + env=environ, + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + pkg_info_lines = pkginfo_file.read().split('\n') + assert 'Provides-Extra: foobar' in pkg_info_lines + assert 'Metadata-Version: 2.1' in pkg_info_lines + + def test_doesnt_provides_extra(self, tmpdir_cwd, env): + self._setup_script_with_requires( + '''install_requires=["spam ; python_version<'3.6'"]''') + environ = os.environ.copy().update( + HOME=env.paths['home'], + ) + environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + env=environ, + ) + egg_info_dir = 
os.path.join('.', 'foo.egg-info') + with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + pkg_info_text = pkginfo_file.read() + assert 'Provides-Extra:' not in pkg_info_text + + @pytest.mark.parametrize("files, license_in_sources", [ + ({ + 'setup.cfg': DALS(""" + [metadata] + license_file = LICENSE + """), + 'LICENSE': "Test license" + }, True), # with license + ({ + 'setup.cfg': DALS(""" + [metadata] + license_file = INVALID_LICENSE + """), + 'LICENSE': "Test license" + }, False), # with an invalid license + ({ + 'setup.cfg': DALS(""" + """), + 'LICENSE': "Test license" + }, True), # no license_file attribute, LICENSE auto-included + ({ + 'setup.cfg': DALS(""" + [metadata] + license_file = LICENSE + """), + 'MANIFEST.in': "exclude LICENSE", + 'LICENSE': "Test license" + }, True), # manifest is overwritten by license_file + pytest.param({ + 'setup.cfg': DALS(""" + [metadata] + license_file = LICEN[CS]E* + """), + 'LICENSE': "Test license", + }, True, + id="glob_pattern"), + ]) + def test_setup_cfg_license_file( + self, tmpdir_cwd, env, files, license_in_sources): + self._create_project() + path.build(files) + + environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]) + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + + with open(os.path.join(egg_info_dir, 'SOURCES.txt')) as sources_file: + sources_text = sources_file.read() + + if license_in_sources: + assert 'LICENSE' in sources_text + else: + assert 'LICENSE' not in sources_text + # for invalid license test + assert 'INVALID_LICENSE' not in sources_text + + @pytest.mark.parametrize("files, incl_licenses, excl_licenses", [ + ({ + 'setup.cfg': DALS(""" + [metadata] + license_files = + LICENSE-ABC + LICENSE-XYZ + """), + 'LICENSE-ABC': "ABC license", + 'LICENSE-XYZ': "XYZ license" + }, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with licenses + ({ + 'setup.cfg': DALS(""" + [metadata] + license_files = LICENSE-ABC, LICENSE-XYZ + """), + 'LICENSE-ABC': "ABC license", + 'LICENSE-XYZ': "XYZ license" + }, ['LICENSE-ABC', 'LICENSE-XYZ'], []), # with commas + ({ + 'setup.cfg': DALS(""" + [metadata] + license_files = + LICENSE-ABC + """), + 'LICENSE-ABC': "ABC license", + 'LICENSE-XYZ': "XYZ license" + }, ['LICENSE-ABC'], ['LICENSE-XYZ']), # with one license + ({ + 'setup.cfg': DALS(""" + [metadata] + license_files = + """), + 'LICENSE-ABC': "ABC license", + 'LICENSE-XYZ': "XYZ license" + }, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # empty + ({ + 'setup.cfg': DALS(""" + [metadata] + license_files = LICENSE-XYZ + """), + 'LICENSE-ABC': "ABC license", + 'LICENSE-XYZ': "XYZ license" + }, ['LICENSE-XYZ'], ['LICENSE-ABC']), # on same line + ({ + 'setup.cfg': DALS(""" + [metadata] + license_files = + LICENSE-ABC + INVALID_LICENSE + """), + 'LICENSE-ABC': "Test license" + }, ['LICENSE-ABC'], ['INVALID_LICENSE']), # with an invalid license + ({ + 'setup.cfg': DALS(""" + """), + 'LICENSE': "Test license" + }, ['LICENSE'], []), # no license_files attribute, LICENSE auto-included + ({ + 'setup.cfg': DALS(""" + [metadata] + license_files = LICENSE + """), + 'MANIFEST.in': "exclude LICENSE", + 'LICENSE': "Test license" + }, ['LICENSE'], []), # manifest is overwritten by license_files + ({ + 'setup.cfg': DALS(""" + [metadata] + license_files = + LICENSE-ABC + LICENSE-XYZ + """), + 'MANIFEST.in': "exclude LICENSE-XYZ", + 'LICENSE-ABC': "ABC license", + 'LICENSE-XYZ': "XYZ license" + # manifest is overwritten by license_files + }, ['LICENSE-ABC', 'LICENSE-XYZ'], []), + pytest.param({ + 'setup.cfg': 
"", + 'LICENSE-ABC': "ABC license", + 'COPYING-ABC': "ABC copying", + 'NOTICE-ABC': "ABC notice", + 'AUTHORS-ABC': "ABC authors", + 'LICENCE-XYZ': "XYZ license", + 'LICENSE': "License", + 'INVALID-LICENSE': "Invalid license", + }, [ + 'LICENSE-ABC', + 'COPYING-ABC', + 'NOTICE-ABC', + 'AUTHORS-ABC', + 'LICENCE-XYZ', + 'LICENSE', + ], ['INVALID-LICENSE'], + # ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*') + id="default_glob_patterns"), + pytest.param({ + 'setup.cfg': DALS(""" + [metadata] + license_files = + LICENSE* + """), + 'LICENSE-ABC': "ABC license", + 'NOTICE-XYZ': "XYZ notice", + }, ['LICENSE-ABC'], ['NOTICE-XYZ'], + id="no_default_glob_patterns"), + pytest.param({ + 'setup.cfg': DALS(""" + [metadata] + license_files = + LICENSE-ABC + LICENSE* + """), + 'LICENSE-ABC': "ABC license", + }, ['LICENSE-ABC'], [], + id="files_only_added_once", + ), + ]) + def test_setup_cfg_license_files( + self, tmpdir_cwd, env, files, incl_licenses, excl_licenses): + self._create_project() + path.build(files) + + environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]) + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + + with open(os.path.join(egg_info_dir, 'SOURCES.txt')) as sources_file: + sources_lines = list(line.strip() for line in sources_file) + + for lf in incl_licenses: + assert sources_lines.count(lf) == 1 + + for lf in excl_licenses: + assert sources_lines.count(lf) == 0 + + @pytest.mark.parametrize("files, incl_licenses, excl_licenses", [ + ({ + 'setup.cfg': DALS(""" + [metadata] + license_file = + license_files = + """), + 'LICENSE-ABC': "ABC license", + 'LICENSE-XYZ': "XYZ license" + }, [], ['LICENSE-ABC', 'LICENSE-XYZ']), # both empty + ({ + 'setup.cfg': DALS(""" + [metadata] + license_file = + LICENSE-ABC + LICENSE-XYZ + """), + 'LICENSE-ABC': "ABC license", + 'LICENSE-XYZ': "XYZ license" + # license_file is still singular + }, [], ['LICENSE-ABC', 'LICENSE-XYZ']), + ({ + 'setup.cfg': DALS(""" + [metadata] + license_file = LICENSE-ABC + license_files = + LICENSE-XYZ + LICENSE-PQR + """), + 'LICENSE-ABC': "ABC license", + 'LICENSE-PQR': "PQR license", + 'LICENSE-XYZ': "XYZ license" + }, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []), # combined + ({ + 'setup.cfg': DALS(""" + [metadata] + license_file = LICENSE-ABC + license_files = + LICENSE-ABC + LICENSE-XYZ + LICENSE-PQR + """), + 'LICENSE-ABC': "ABC license", + 'LICENSE-PQR': "PQR license", + 'LICENSE-XYZ': "XYZ license" + # duplicate license + }, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []), + ({ + 'setup.cfg': DALS(""" + [metadata] + license_file = LICENSE-ABC + license_files = + LICENSE-XYZ + """), + 'LICENSE-ABC': "ABC license", + 'LICENSE-PQR': "PQR license", + 'LICENSE-XYZ': "XYZ license" + # combined subset + }, ['LICENSE-ABC', 'LICENSE-XYZ'], ['LICENSE-PQR']), + ({ + 'setup.cfg': DALS(""" + [metadata] + license_file = LICENSE-ABC + license_files = + LICENSE-XYZ + LICENSE-PQR + """), + 'LICENSE-PQR': "Test license" + # with invalid licenses + }, ['LICENSE-PQR'], ['LICENSE-ABC', 'LICENSE-XYZ']), + ({ + 'setup.cfg': DALS(""" + [metadata] + license_file = LICENSE-ABC + license_files = + LICENSE-PQR + LICENSE-XYZ + """), + 'MANIFEST.in': "exclude LICENSE-ABC\nexclude LICENSE-PQR", + 'LICENSE-ABC': "ABC license", + 'LICENSE-PQR': "PQR license", + 'LICENSE-XYZ': "XYZ license" + # manifest is overwritten + }, ['LICENSE-ABC', 'LICENSE-PQR', 'LICENSE-XYZ'], []), + pytest.param({ + 'setup.cfg': DALS(""" + [metadata] + license_file = LICENSE* + """), + 'LICENSE-ABC': "ABC 
license", + 'NOTICE-XYZ': "XYZ notice", + }, ['LICENSE-ABC'], ['NOTICE-XYZ'], + id="no_default_glob_patterns"), + pytest.param({ + 'setup.cfg': DALS(""" + [metadata] + license_file = LICENSE* + license_files = + NOTICE* + """), + 'LICENSE-ABC': "ABC license", + 'NOTICE-ABC': "ABC notice", + 'AUTHORS-ABC': "ABC authors", + }, ['LICENSE-ABC', 'NOTICE-ABC'], ['AUTHORS-ABC'], + id="combined_glob_patterrns"), + ]) + def test_setup_cfg_license_file_license_files( + self, tmpdir_cwd, env, files, incl_licenses, excl_licenses): + self._create_project() + path.build(files) + + environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]) + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + + with open(os.path.join(egg_info_dir, 'SOURCES.txt')) as sources_file: + sources_lines = list(line.strip() for line in sources_file) + + for lf in incl_licenses: + assert sources_lines.count(lf) == 1 + + for lf in excl_licenses: + assert sources_lines.count(lf) == 0 + + def test_license_file_attr_pkg_info(self, tmpdir_cwd, env): + """All matched license files should have a corresponding License-File.""" + self._create_project() + path.build({ + "setup.cfg": DALS(""" + [metadata] + license_files = + NOTICE* + LICENSE* + """), + "LICENSE-ABC": "ABC license", + "LICENSE-XYZ": "XYZ license", + "NOTICE": "included", + "IGNORE": "not include", + }) + + environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]) + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + pkg_info_lines = pkginfo_file.read().split('\n') + license_file_lines = [ + line for line in pkg_info_lines if line.startswith('License-File:')] + + # Only 'NOTICE', LICENSE-ABC', and 'LICENSE-XYZ' should have been matched + # Also assert that order from license_files is keeped + assert "License-File: NOTICE" == license_file_lines[0] + assert "License-File: LICENSE-ABC" in license_file_lines[1:] + assert "License-File: LICENSE-XYZ" in license_file_lines[1:] + + def test_metadata_version(self, tmpdir_cwd, env): + """Make sure latest metadata version is used by default.""" + self._setup_script_with_requires("") + code, data = environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + pkg_info_lines = pkginfo_file.read().split('\n') + # Update metadata version if changed + assert self._extract_mv_version(pkg_info_lines) == (2, 1) + + def test_long_description_content_type(self, tmpdir_cwd, env): + # Test that specifying a `long_description_content_type` keyword arg to + # the `setup` function results in writing a `Description-Content-Type` + # line to the `PKG-INFO` file in the `<distribution>.egg-info` + # directory. 
+ # `Description-Content-Type` is described at + # https://github.com/pypa/python-packaging-user-guide/pull/258 + + self._setup_script_with_requires( + """long_description_content_type='text/markdown',""") + environ = os.environ.copy().update( + HOME=env.paths['home'], + ) + code, data = environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + env=environ, + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + pkg_info_lines = pkginfo_file.read().split('\n') + expected_line = 'Description-Content-Type: text/markdown' + assert expected_line in pkg_info_lines + assert 'Metadata-Version: 2.1' in pkg_info_lines + + def test_long_description(self, tmpdir_cwd, env): + # Test that specifying `long_description` and `long_description_content_type` + # keyword args to the `setup` function results in writing + # the description in the message payload of the `PKG-INFO` file + # in the `<distribution>.egg-info` directory. + self._setup_script_with_requires( + "long_description='This is a long description\\nover multiple lines'," + "long_description_content_type='text/markdown'," + ) + code, data = environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + pkg_info_lines = pkginfo_file.read().split('\n') + assert 'Metadata-Version: 2.1' in pkg_info_lines + assert '' == pkg_info_lines[-1] # last line should be empty + long_desc_lines = pkg_info_lines[pkg_info_lines.index(''):] + assert 'This is a long description' in long_desc_lines + assert 'over multiple lines' in long_desc_lines + + def test_project_urls(self, tmpdir_cwd, env): + # Test that specifying a `project_urls` dict to the `setup` + # function results in writing multiple `Project-URL` lines to + # the `PKG-INFO` file in the `<distribution>.egg-info` + # directory. 
+ # `Project-URL` is described at https://packaging.python.org + # /specifications/core-metadata/#project-url-multiple-use + + self._setup_script_with_requires( + """project_urls={ + 'Link One': 'https://example.com/one/', + 'Link Two': 'https://example.com/two/', + },""") + environ = os.environ.copy().update( + HOME=env.paths['home'], + ) + code, data = environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + env=environ, + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + pkg_info_lines = pkginfo_file.read().split('\n') + expected_line = 'Project-URL: Link One, https://example.com/one/' + assert expected_line in pkg_info_lines + expected_line = 'Project-URL: Link Two, https://example.com/two/' + assert expected_line in pkg_info_lines + assert self._extract_mv_version(pkg_info_lines) >= (1, 2) + + def test_license(self, tmpdir_cwd, env): + """Test single line license.""" + self._setup_script_with_requires( + "license='MIT'," + ) + code, data = environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + pkg_info_lines = pkginfo_file.read().split('\n') + assert 'License: MIT' in pkg_info_lines + + def test_license_escape(self, tmpdir_cwd, env): + """Test license is escaped correctly if longer than one line.""" + self._setup_script_with_requires( + "license='This is a long license text \\nover multiple lines'," + ) + code, data = environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + pkg_info_lines = pkginfo_file.read().split('\n') + + assert 'License: This is a long license text ' in pkg_info_lines + assert ' over multiple lines' in pkg_info_lines + assert 'text \n over multiple' in '\n'.join(pkg_info_lines) + + def test_python_requires_egg_info(self, tmpdir_cwd, env): + self._setup_script_with_requires( + """python_requires='>=2.7.12',""") + environ = os.environ.copy().update( + HOME=env.paths['home'], + ) + code, data = environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + env=environ, + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + pkg_info_lines = pkginfo_file.read().split('\n') + assert 'Requires-Python: >=2.7.12' in pkg_info_lines + assert self._extract_mv_version(pkg_info_lines) >= (1, 2) + + def test_manifest_maker_warning_suppression(self): + fixtures = [ + "standard file not found: should have one of foo.py, bar.py", + "standard file 'setup.py' not found" + ] + + for msg in fixtures: + assert manifest_maker._should_suppress_warning(msg) + + def test_egg_info_includes_setup_py(self, tmpdir_cwd): + self._create_project() + dist = Distribution({"name": "foo", "version": "0.0.1"}) + dist.script_name = "non_setup.py" + egg_info_instance = egg_info(dist) + egg_info_instance.finalize_options() + egg_info_instance.run() + + assert 'setup.py' in egg_info_instance.filelist.files + + with open(egg_info_instance.egg_info + "/SOURCES.txt") as f: + sources = f.read().split('\n') + assert 'setup.py' in sources + + def 
_run_egg_info_command(self, tmpdir_cwd, env, cmd=None, output=None): + environ = os.environ.copy().update( + HOME=env.paths['home'], + ) + if cmd is None: + cmd = [ + 'egg_info', + ] + code, data = environment.run_setup_py( + cmd=cmd, + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + env=environ, + ) + assert not code, data + + if output: + assert output in data + + def test_egg_info_tag_only_once(self, tmpdir_cwd, env): + self._create_project() + path.build({ + 'setup.cfg': DALS(""" + [egg_info] + tag_build = dev + tag_date = 0 + tag_svn_revision = 0 + """), + }) + self._run_egg_info_command(tmpdir_cwd, env) + egg_info_dir = os.path.join('.', 'foo.egg-info') + with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + pkg_info_lines = pkginfo_file.read().split('\n') + assert 'Version: 0.0.0.dev0' in pkg_info_lines + + def test_get_pkg_info_revision_deprecated(self): + pytest.warns(EggInfoDeprecationWarning, get_pkg_info_revision) diff --git a/setuptools/tests/test_extern.py b/setuptools/tests/test_extern.py new file mode 100644 index 00000000..0d6b164f --- /dev/null +++ b/setuptools/tests/test_extern.py @@ -0,0 +1,20 @@ +import importlib +import pickle + +from setuptools import Distribution +from setuptools.extern import ordered_set + + +def test_reimport_extern(): + ordered_set2 = importlib.import_module(ordered_set.__name__) + assert ordered_set is ordered_set2 + + +def test_orderedset_pickle_roundtrip(): + o1 = ordered_set.OrderedSet([1, 2, 5]) + o2 = pickle.loads(pickle.dumps(o1)) + assert o1 == o2 + + +def test_distribution_picklable(): + pickle.loads(pickle.dumps(Distribution())) diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py new file mode 100644 index 00000000..906713f6 --- /dev/null +++ b/setuptools/tests/test_find_packages.py @@ -0,0 +1,180 @@ +"""Tests for setuptools.find_packages().""" +import os +import sys +import shutil +import tempfile +import platform + +import pytest + +from setuptools import find_packages +from setuptools import find_namespace_packages + + +# modeled after CPython's test.support.can_symlink +def can_symlink(): + TESTFN = tempfile.mktemp() + symlink_path = TESTFN + "can_symlink" + try: + os.symlink(TESTFN, symlink_path) + can = True + except (OSError, NotImplementedError, AttributeError): + can = False + else: + os.remove(symlink_path) + globals().update(can_symlink=lambda: can) + return can + + +def has_symlink(): + bad_symlink = ( + # Windows symlink directory detection is broken on Python 3.2 + platform.system() == 'Windows' and sys.version_info[:2] == (3, 2) + ) + return can_symlink() and not bad_symlink + + +class TestFindPackages: + def setup_method(self, method): + self.dist_dir = tempfile.mkdtemp() + self._make_pkg_structure() + + def teardown_method(self, method): + shutil.rmtree(self.dist_dir) + + def _make_pkg_structure(self): + """Make basic package structure. 
+ + dist/ + docs/ + conf.py + pkg/ + __pycache__/ + nspkg/ + mod.py + subpkg/ + assets/ + asset + __init__.py + setup.py + + """ + self.docs_dir = self._mkdir('docs', self.dist_dir) + self._touch('conf.py', self.docs_dir) + self.pkg_dir = self._mkdir('pkg', self.dist_dir) + self._mkdir('__pycache__', self.pkg_dir) + self.ns_pkg_dir = self._mkdir('nspkg', self.pkg_dir) + self._touch('mod.py', self.ns_pkg_dir) + self.sub_pkg_dir = self._mkdir('subpkg', self.pkg_dir) + self.asset_dir = self._mkdir('assets', self.sub_pkg_dir) + self._touch('asset', self.asset_dir) + self._touch('__init__.py', self.sub_pkg_dir) + self._touch('setup.py', self.dist_dir) + + def _mkdir(self, path, parent_dir=None): + if parent_dir: + path = os.path.join(parent_dir, path) + os.mkdir(path) + return path + + def _touch(self, path, dir_=None): + if dir_: + path = os.path.join(dir_, path) + fp = open(path, 'w') + fp.close() + return path + + def test_regular_package(self): + self._touch('__init__.py', self.pkg_dir) + packages = find_packages(self.dist_dir) + assert packages == ['pkg', 'pkg.subpkg'] + + def test_exclude(self): + self._touch('__init__.py', self.pkg_dir) + packages = find_packages(self.dist_dir, exclude=('pkg.*',)) + assert packages == ['pkg'] + + def test_exclude_recursive(self): + """ + Excluding a parent package should not exclude child packages as well. + """ + self._touch('__init__.py', self.pkg_dir) + self._touch('__init__.py', self.sub_pkg_dir) + packages = find_packages(self.dist_dir, exclude=('pkg',)) + assert packages == ['pkg.subpkg'] + + def test_include_excludes_other(self): + """ + If include is specified, other packages should be excluded. + """ + self._touch('__init__.py', self.pkg_dir) + alt_dir = self._mkdir('other_pkg', self.dist_dir) + self._touch('__init__.py', alt_dir) + packages = find_packages(self.dist_dir, include=['other_pkg']) + assert packages == ['other_pkg'] + + def test_dir_with_dot_is_skipped(self): + shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) + data_dir = self._mkdir('some.data', self.pkg_dir) + self._touch('__init__.py', data_dir) + self._touch('file.dat', data_dir) + packages = find_packages(self.dist_dir) + assert 'pkg.some.data' not in packages + + def test_dir_with_packages_in_subdir_is_excluded(self): + """ + Ensure that a package in a non-package such as build/pkg/__init__.py + is excluded. + """ + build_dir = self._mkdir('build', self.dist_dir) + build_pkg_dir = self._mkdir('pkg', build_dir) + self._touch('__init__.py', build_pkg_dir) + packages = find_packages(self.dist_dir) + assert 'build.pkg' not in packages + + @pytest.mark.skipif(not has_symlink(), reason='Symlink support required') + def test_symlinked_packages_are_included(self): + """ + A symbolically-linked directory should be treated like any other + directory when matched as a package. + + Create a link from lpkg -> pkg. 
+ """ + self._touch('__init__.py', self.pkg_dir) + linked_pkg = os.path.join(self.dist_dir, 'lpkg') + os.symlink('pkg', linked_pkg) + assert os.path.isdir(linked_pkg) + packages = find_packages(self.dist_dir) + assert 'lpkg' in packages + + def _assert_packages(self, actual, expected): + assert set(actual) == set(expected) + + def test_pep420_ns_package(self): + packages = find_namespace_packages( + self.dist_dir, include=['pkg*'], exclude=['pkg.subpkg.assets']) + self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) + + def test_pep420_ns_package_no_includes(self): + packages = find_namespace_packages( + self.dist_dir, exclude=['pkg.subpkg.assets']) + self._assert_packages( + packages, ['docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg']) + + def test_pep420_ns_package_no_includes_or_excludes(self): + packages = find_namespace_packages(self.dist_dir) + expected = [ + 'docs', 'pkg', 'pkg.nspkg', 'pkg.subpkg', 'pkg.subpkg.assets'] + self._assert_packages(packages, expected) + + def test_regular_package_with_nested_pep420_ns_packages(self): + self._touch('__init__.py', self.pkg_dir) + packages = find_namespace_packages( + self.dist_dir, exclude=['docs', 'pkg.subpkg.assets']) + self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) + + def test_pep420_ns_package_no_non_package_dirs(self): + shutil.rmtree(self.docs_dir) + shutil.rmtree(os.path.join(self.dist_dir, 'pkg/subpkg/assets')) + packages = find_namespace_packages(self.dist_dir) + self._assert_packages(packages, ['pkg', 'pkg.nspkg', 'pkg.subpkg']) diff --git a/setuptools/tests/test_glob.py b/setuptools/tests/test_glob.py new file mode 100644 index 00000000..e99587f5 --- /dev/null +++ b/setuptools/tests/test_glob.py @@ -0,0 +1,34 @@ +import pytest +from jaraco import path + +from setuptools.glob import glob + + +@pytest.mark.parametrize('tree, pattern, matches', ( + ('', b'', []), + ('', '', []), + (''' + appveyor.yml + CHANGES.rst + LICENSE + MANIFEST.in + pyproject.toml + README.rst + setup.cfg + setup.py + ''', '*.rst', ('CHANGES.rst', 'README.rst')), + (''' + appveyor.yml + CHANGES.rst + LICENSE + MANIFEST.in + pyproject.toml + README.rst + setup.cfg + setup.py + ''', b'*.rst', (b'CHANGES.rst', b'README.rst')), +)) +def test_glob(monkeypatch, tmpdir, tree, pattern, matches): + monkeypatch.chdir(tmpdir) + path.build({name: '' for name in tree.split()}) + assert list(sorted(glob(pattern))) == list(sorted(matches)) diff --git a/setuptools/tests/test_install_scripts.py b/setuptools/tests/test_install_scripts.py new file mode 100644 index 00000000..4338c792 --- /dev/null +++ b/setuptools/tests/test_install_scripts.py @@ -0,0 +1,90 @@ +"""install_scripts tests +""" + +import io +import sys + +import pytest + +from setuptools.command.install_scripts import install_scripts +from setuptools.dist import Distribution +from . 
import contexts + + +class TestInstallScripts: + settings = dict( + name='foo', + entry_points={'console_scripts': ['foo=foo:foo']}, + version='0.0', + ) + unix_exe = '/usr/dummy-test-path/local/bin/python' + unix_spaces_exe = '/usr/bin/env dummy-test-python' + win32_exe = 'C:\\Dummy Test Path\\Program Files\\Python 3.6\\python.exe' + + def _run_install_scripts(self, install_dir, executable=None): + dist = Distribution(self.settings) + dist.script_name = 'setup.py' + cmd = install_scripts(dist) + cmd.install_dir = install_dir + if executable is not None: + bs = cmd.get_finalized_command('build_scripts') + bs.executable = executable + cmd.ensure_finalized() + with contexts.quiet(): + cmd.run() + + @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only') + def test_sys_executable_escaping_unix(self, tmpdir, monkeypatch): + """ + Ensure that shebang is not quoted on Unix when getting the Python exe + from sys.executable. + """ + expected = '#!%s\n' % self.unix_exe + monkeypatch.setattr('sys.executable', self.unix_exe) + with tmpdir.as_cwd(): + self._run_install_scripts(str(tmpdir)) + with io.open(str(tmpdir.join('foo')), 'r') as f: + actual = f.readline() + assert actual == expected + + @pytest.mark.skipif(sys.platform != 'win32', reason='Windows only') + def test_sys_executable_escaping_win32(self, tmpdir, monkeypatch): + """ + Ensure that shebang is quoted on Windows when getting the Python exe + from sys.executable and it contains a space. + """ + expected = '#!"%s"\n' % self.win32_exe + monkeypatch.setattr('sys.executable', self.win32_exe) + with tmpdir.as_cwd(): + self._run_install_scripts(str(tmpdir)) + with io.open(str(tmpdir.join('foo-script.py')), 'r') as f: + actual = f.readline() + assert actual == expected + + @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only') + def test_executable_with_spaces_escaping_unix(self, tmpdir): + """ + Ensure that shebang on Unix is not quoted, even when + a value with spaces + is specified using --executable. + """ + expected = '#!%s\n' % self.unix_spaces_exe + with tmpdir.as_cwd(): + self._run_install_scripts(str(tmpdir), self.unix_spaces_exe) + with io.open(str(tmpdir.join('foo')), 'r') as f: + actual = f.readline() + assert actual == expected + + @pytest.mark.skipif(sys.platform != 'win32', reason='Windows only') + def test_executable_arg_escaping_win32(self, tmpdir): + """ + Ensure that shebang on Windows is quoted when + getting a path with spaces + from --executable, that is itself properly quoted. + """ + expected = '#!"%s"\n' % self.win32_exe + with tmpdir.as_cwd(): + self._run_install_scripts(str(tmpdir), '"' + self.win32_exe + '"') + with io.open(str(tmpdir.join('foo-script.py')), 'r') as f: + actual = f.readline() + assert actual == expected diff --git a/setuptools/tests/test_integration.py b/setuptools/tests/test_integration.py new file mode 100644 index 00000000..b5578312 --- /dev/null +++ b/setuptools/tests/test_integration.py @@ -0,0 +1,121 @@ +"""Run some integration tests. + +Try to install a few packages. 
+""" + +import glob +import os +import sys +import urllib.request + +import pytest + +from setuptools.command.easy_install import easy_install +from setuptools.command import easy_install as easy_install_pkg +from setuptools.dist import Distribution + + +pytestmark = pytest.mark.skipif( + 'platform.python_implementation() == "PyPy" and ' + 'platform.system() == "Windows"', + reason="pypa/setuptools#2496", +) + + +def setup_module(module): + packages = 'stevedore', 'virtualenvwrapper', 'pbr', 'novaclient' + for pkg in packages: + try: + __import__(pkg) + tmpl = "Integration tests cannot run when {pkg} is installed" + pytest.skip(tmpl.format(**locals())) + except ImportError: + pass + + try: + urllib.request.urlopen('https://pypi.python.org/pypi') + except Exception as exc: + pytest.skip(str(exc)) + + +@pytest.fixture +def install_context(request, tmpdir, monkeypatch): + """Fixture to set up temporary installation directory. + """ + # Save old values so we can restore them. + new_cwd = tmpdir.mkdir('cwd') + user_base = tmpdir.mkdir('user_base') + user_site = tmpdir.mkdir('user_site') + install_dir = tmpdir.mkdir('install_dir') + + def fin(): + # undo the monkeypatch, particularly needed under + # windows because of kept handle on cwd + monkeypatch.undo() + new_cwd.remove() + user_base.remove() + user_site.remove() + install_dir.remove() + + request.addfinalizer(fin) + + # Change the environment and site settings to control where the + # files are installed and ensure we do not overwrite anything. + monkeypatch.chdir(new_cwd) + monkeypatch.setattr(easy_install_pkg, '__file__', user_site.strpath) + monkeypatch.setattr('site.USER_BASE', user_base.strpath) + monkeypatch.setattr('site.USER_SITE', user_site.strpath) + monkeypatch.setattr('sys.path', sys.path + [install_dir.strpath]) + monkeypatch.setenv(str('PYTHONPATH'), str(os.path.pathsep.join(sys.path))) + + # Set up the command for performing the installation. + dist = Distribution() + cmd = easy_install(dist) + cmd.install_dir = install_dir.strpath + return cmd + + +def _install_one(requirement, cmd, pkgname, modulename): + cmd.args = [requirement] + cmd.ensure_finalized() + cmd.run() + target = cmd.install_dir + dest_path = glob.glob(os.path.join(target, pkgname + '*.egg')) + assert dest_path + assert os.path.exists(os.path.join(dest_path[0], pkgname, modulename)) + + +def test_stevedore(install_context): + _install_one('stevedore', install_context, + 'stevedore', 'extension.py') + + +@pytest.mark.xfail +def test_virtualenvwrapper(install_context): + _install_one('virtualenvwrapper', install_context, + 'virtualenvwrapper', 'hook_loader.py') + + +def test_pbr(install_context): + _install_one('pbr', install_context, + 'pbr', 'core.py') + + +@pytest.mark.xfail +def test_python_novaclient(install_context): + _install_one('python-novaclient', install_context, + 'novaclient', 'base.py') + + +def test_pyuri(install_context): + """ + Install the pyuri package (version 0.3.1 at the time of writing). + + This is also a regression test for issue #1016. + """ + _install_one('pyuri', install_context, 'pyuri', 'uri.py') + + pyuri = install_context.installed_projects['pyuri'] + + # The package data should be installed. 
+ assert os.path.exists(os.path.join(pyuri.location, 'pyuri', 'uri.regex')) diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py new file mode 100644 index 00000000..82bdb9c6 --- /dev/null +++ b/setuptools/tests/test_manifest.py @@ -0,0 +1,601 @@ +# -*- coding: utf-8 -*- +"""sdist tests""" + +import contextlib +import os +import shutil +import sys +import tempfile +import itertools +import io +from distutils import log +from distutils.errors import DistutilsTemplateError + +from setuptools.command.egg_info import FileList, egg_info, translate_pattern +from setuptools.dist import Distribution +from setuptools.tests.textwrap import DALS + +import pytest + + +def make_local_path(s): + """Converts '/' in a string to os.sep""" + return s.replace('/', os.sep) + + +SETUP_ATTRS = { + 'name': 'app', + 'version': '0.0', + 'packages': ['app'], +} + +SETUP_PY = """\ +from setuptools import setup + +setup(**%r) +""" % SETUP_ATTRS + + +@contextlib.contextmanager +def quiet(): + old_stdout, old_stderr = sys.stdout, sys.stderr + sys.stdout, sys.stderr = io.StringIO(), io.StringIO() + try: + yield + finally: + sys.stdout, sys.stderr = old_stdout, old_stderr + + +def touch(filename): + open(filename, 'w').close() + + +# The set of files always in the manifest, including all files in the +# .egg-info directory +default_files = frozenset(map(make_local_path, [ + 'README.rst', + 'MANIFEST.in', + 'setup.py', + 'app.egg-info/PKG-INFO', + 'app.egg-info/SOURCES.txt', + 'app.egg-info/dependency_links.txt', + 'app.egg-info/top_level.txt', + 'app/__init__.py', +])) + + +translate_specs = [ + ('foo', ['foo'], ['bar', 'foobar']), + ('foo/bar', ['foo/bar'], ['foo/bar/baz', './foo/bar', 'foo']), + + # Glob matching + ('*.txt', ['foo.txt', 'bar.txt'], ['foo/foo.txt']), + ( + 'dir/*.txt', + ['dir/foo.txt', 'dir/bar.txt', 'dir/.txt'], ['notdir/foo.txt']), + ('*/*.py', ['bin/start.py'], []), + ('docs/page-?.txt', ['docs/page-9.txt'], ['docs/page-10.txt']), + + # Globstars change what they mean depending upon where they are + ( + 'foo/**/bar', + ['foo/bing/bar', 'foo/bing/bang/bar', 'foo/bar'], + ['foo/abar'], + ), + ( + 'foo/**', + ['foo/bar/bing.py', 'foo/x'], + ['/foo/x'], + ), + ( + '**', + ['x', 'abc/xyz', '@nything'], + [], + ), + + # Character classes + ( + 'pre[one]post', + ['preopost', 'prenpost', 'preepost'], + ['prepost', 'preonepost'], + ), + + ( + 'hello[!one]world', + ['helloxworld', 'helloyworld'], + ['hellooworld', 'helloworld', 'hellooneworld'], + ), + + ( + '[]one].txt', + ['o.txt', '].txt', 'e.txt'], + ['one].txt'], + ), + + ( + 'foo[!]one]bar', + ['fooybar'], + ['foo]bar', 'fooobar', 'fooebar'], + ), + +] +""" +A spec of inputs for 'translate_pattern' and matches and mismatches +for that input. 
+""" + +match_params = itertools.chain.from_iterable( + zip(itertools.repeat(pattern), matches) + for pattern, matches, mismatches in translate_specs +) + + +@pytest.fixture(params=match_params) +def pattern_match(request): + return map(make_local_path, request.param) + + +mismatch_params = itertools.chain.from_iterable( + zip(itertools.repeat(pattern), mismatches) + for pattern, matches, mismatches in translate_specs +) + + +@pytest.fixture(params=mismatch_params) +def pattern_mismatch(request): + return map(make_local_path, request.param) + + +def test_translated_pattern_match(pattern_match): + pattern, target = pattern_match + assert translate_pattern(pattern).match(target) + + +def test_translated_pattern_mismatch(pattern_mismatch): + pattern, target = pattern_mismatch + assert not translate_pattern(pattern).match(target) + + +class TempDirTestCase: + def setup_method(self, method): + self.temp_dir = tempfile.mkdtemp() + self.old_cwd = os.getcwd() + os.chdir(self.temp_dir) + + def teardown_method(self, method): + os.chdir(self.old_cwd) + shutil.rmtree(self.temp_dir) + + +class TestManifestTest(TempDirTestCase): + def setup_method(self, method): + super(TestManifestTest, self).setup_method(method) + + f = open(os.path.join(self.temp_dir, 'setup.py'), 'w') + f.write(SETUP_PY) + f.close() + """ + Create a file tree like: + - LICENSE + - README.rst + - testing.rst + - .hidden.rst + - app/ + - __init__.py + - a.txt + - b.txt + - c.rst + - static/ + - app.js + - app.js.map + - app.css + - app.css.map + """ + + for fname in ['README.rst', '.hidden.rst', 'testing.rst', 'LICENSE']: + touch(os.path.join(self.temp_dir, fname)) + + # Set up the rest of the test package + test_pkg = os.path.join(self.temp_dir, 'app') + os.mkdir(test_pkg) + for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']: + touch(os.path.join(test_pkg, fname)) + + # Some compiled front-end assets to include + static = os.path.join(test_pkg, 'static') + os.mkdir(static) + for fname in ['app.js', 'app.js.map', 'app.css', 'app.css.map']: + touch(os.path.join(static, fname)) + + def make_manifest(self, contents): + """Write a MANIFEST.in.""" + with open(os.path.join(self.temp_dir, 'MANIFEST.in'), 'w') as f: + f.write(DALS(contents)) + + def get_files(self): + """Run egg_info and get all the files to include, as a set""" + dist = Distribution(SETUP_ATTRS) + dist.script_name = 'setup.py' + cmd = egg_info(dist) + cmd.ensure_finalized() + + cmd.run() + + return set(cmd.filelist.files) + + def test_no_manifest(self): + """Check a missing MANIFEST.in includes only the standard files.""" + assert (default_files - set(['MANIFEST.in'])) == self.get_files() + + def test_empty_files(self): + """Check an empty MANIFEST.in includes only the standard files.""" + self.make_manifest("") + assert default_files == self.get_files() + + def test_include(self): + """Include extra rst files in the project root.""" + self.make_manifest("include *.rst") + files = default_files | set([ + 'testing.rst', '.hidden.rst']) + assert files == self.get_files() + + def test_exclude(self): + """Include everything in app/ except the text files""" + ml = make_local_path + self.make_manifest( + """ + include app/* + exclude app/*.txt + """) + files = default_files | set([ml('app/c.rst')]) + assert files == self.get_files() + + def test_include_multiple(self): + """Include with multiple patterns.""" + ml = make_local_path + self.make_manifest("include app/*.txt app/static/*") + files = default_files | set([ + ml('app/a.txt'), ml('app/b.txt'), + 
ml('app/static/app.js'), ml('app/static/app.js.map'), + ml('app/static/app.css'), ml('app/static/app.css.map')]) + assert files == self.get_files() + + def test_graft(self): + """Include the whole app/static/ directory.""" + ml = make_local_path + self.make_manifest("graft app/static") + files = default_files | set([ + ml('app/static/app.js'), ml('app/static/app.js.map'), + ml('app/static/app.css'), ml('app/static/app.css.map')]) + assert files == self.get_files() + + def test_graft_glob_syntax(self): + """Include the whole app/static/ directory.""" + ml = make_local_path + self.make_manifest("graft */static") + files = default_files | set([ + ml('app/static/app.js'), ml('app/static/app.js.map'), + ml('app/static/app.css'), ml('app/static/app.css.map')]) + assert files == self.get_files() + + def test_graft_global_exclude(self): + """Exclude all *.map files in the project.""" + ml = make_local_path + self.make_manifest( + """ + graft app/static + global-exclude *.map + """) + files = default_files | set([ + ml('app/static/app.js'), ml('app/static/app.css')]) + assert files == self.get_files() + + def test_global_include(self): + """Include all *.rst, *.js, and *.css files in the whole tree.""" + ml = make_local_path + self.make_manifest( + """ + global-include *.rst *.js *.css + """) + files = default_files | set([ + '.hidden.rst', 'testing.rst', ml('app/c.rst'), + ml('app/static/app.js'), ml('app/static/app.css')]) + assert files == self.get_files() + + def test_graft_prune(self): + """Include all files in app/, except for the whole app/static/ dir.""" + ml = make_local_path + self.make_manifest( + """ + graft app + prune app/static + """) + files = default_files | set([ + ml('app/a.txt'), ml('app/b.txt'), ml('app/c.rst')]) + assert files == self.get_files() + + +class TestFileListTest(TempDirTestCase): + """ + A copy of the relevant bits of distutils/tests/test_filelist.py, + to ensure setuptools' version of FileList keeps parity with distutils. 
+ """ + + def setup_method(self, method): + super(TestFileListTest, self).setup_method(method) + self.threshold = log.set_threshold(log.FATAL) + self._old_log = log.Log._log + log.Log._log = self._log + self.logs = [] + + def teardown_method(self, method): + log.set_threshold(self.threshold) + log.Log._log = self._old_log + super(TestFileListTest, self).teardown_method(method) + + def _log(self, level, msg, args): + if level not in (log.DEBUG, log.INFO, log.WARN, log.ERROR, log.FATAL): + raise ValueError('%s wrong log level' % str(level)) + self.logs.append((level, msg, args)) + + def get_logs(self, *levels): + def _format(msg, args): + if len(args) == 0: + return msg + return msg % args + return [_format(msg, args) for level, msg, args + in self.logs if level in levels] + + def clear_logs(self): + self.logs = [] + + def assertNoWarnings(self): + assert self.get_logs(log.WARN) == [] + self.clear_logs() + + def assertWarnings(self): + assert len(self.get_logs(log.WARN)) > 0 + self.clear_logs() + + def make_files(self, files): + for file in files: + file = os.path.join(self.temp_dir, file) + dirname, basename = os.path.split(file) + os.makedirs(dirname, exist_ok=True) + open(file, 'w').close() + + def test_process_template_line(self): + # testing all MANIFEST.in template patterns + file_list = FileList() + ml = make_local_path + + # simulated file list + self.make_files([ + 'foo.tmp', 'ok', 'xo', 'four.txt', + 'buildout.cfg', + # filelist does not filter out VCS directories, + # it's sdist that does + ml('.hg/last-message.txt'), + ml('global/one.txt'), + ml('global/two.txt'), + ml('global/files.x'), + ml('global/here.tmp'), + ml('f/o/f.oo'), + ml('dir/graft-one'), + ml('dir/dir2/graft2'), + ml('dir3/ok'), + ml('dir3/sub/ok.txt'), + ]) + + MANIFEST_IN = DALS("""\ + include ok + include xo + exclude xo + include foo.tmp + include buildout.cfg + global-include *.x + global-include *.txt + global-exclude *.tmp + recursive-include f *.oo + recursive-exclude global *.x + graft dir + prune dir3 + """) + + for line in MANIFEST_IN.split('\n'): + if not line: + continue + file_list.process_template_line(line) + + wanted = [ + 'buildout.cfg', + 'four.txt', + 'ok', + ml('.hg/last-message.txt'), + ml('dir/graft-one'), + ml('dir/dir2/graft2'), + ml('f/o/f.oo'), + ml('global/one.txt'), + ml('global/two.txt'), + ] + + file_list.sort() + assert file_list.files == wanted + + def test_exclude_pattern(self): + # return False if no match + file_list = FileList() + assert not file_list.exclude_pattern('*.py') + + # return True if files match + file_list = FileList() + file_list.files = ['a.py', 'b.py'] + assert file_list.exclude_pattern('*.py') + + # test excludes + file_list = FileList() + file_list.files = ['a.py', 'a.txt'] + file_list.exclude_pattern('*.py') + file_list.sort() + assert file_list.files == ['a.txt'] + + def test_include_pattern(self): + # return False if no match + file_list = FileList() + self.make_files([]) + assert not file_list.include_pattern('*.py') + + # return True if files match + file_list = FileList() + self.make_files(['a.py', 'b.txt']) + assert file_list.include_pattern('*.py') + + # test * matches all files + file_list = FileList() + self.make_files(['a.py', 'b.txt']) + file_list.include_pattern('*') + file_list.sort() + assert file_list.files == ['a.py', 'b.txt'] + + def test_process_template_line_invalid(self): + # invalid lines + file_list = FileList() + for action in ('include', 'exclude', 'global-include', + 'global-exclude', 'recursive-include', + 'recursive-exclude', 
'graft', 'prune', 'blarg'): + try: + file_list.process_template_line(action) + except DistutilsTemplateError: + pass + except Exception: + assert False, "Incorrect error thrown" + else: + assert False, "Should have thrown an error" + + def test_include(self): + ml = make_local_path + # include + file_list = FileList() + self.make_files(['a.py', 'b.txt', ml('d/c.py')]) + + file_list.process_template_line('include *.py') + file_list.sort() + assert file_list.files == ['a.py'] + self.assertNoWarnings() + + file_list.process_template_line('include *.rb') + file_list.sort() + assert file_list.files == ['a.py'] + self.assertWarnings() + + def test_exclude(self): + ml = make_local_path + # exclude + file_list = FileList() + file_list.files = ['a.py', 'b.txt', ml('d/c.py')] + + file_list.process_template_line('exclude *.py') + file_list.sort() + assert file_list.files == ['b.txt', ml('d/c.py')] + self.assertNoWarnings() + + file_list.process_template_line('exclude *.rb') + file_list.sort() + assert file_list.files == ['b.txt', ml('d/c.py')] + self.assertWarnings() + + def test_global_include(self): + ml = make_local_path + # global-include + file_list = FileList() + self.make_files(['a.py', 'b.txt', ml('d/c.py')]) + + file_list.process_template_line('global-include *.py') + file_list.sort() + assert file_list.files == ['a.py', ml('d/c.py')] + self.assertNoWarnings() + + file_list.process_template_line('global-include *.rb') + file_list.sort() + assert file_list.files == ['a.py', ml('d/c.py')] + self.assertWarnings() + + def test_global_exclude(self): + ml = make_local_path + # global-exclude + file_list = FileList() + file_list.files = ['a.py', 'b.txt', ml('d/c.py')] + + file_list.process_template_line('global-exclude *.py') + file_list.sort() + assert file_list.files == ['b.txt'] + self.assertNoWarnings() + + file_list.process_template_line('global-exclude *.rb') + file_list.sort() + assert file_list.files == ['b.txt'] + self.assertWarnings() + + def test_recursive_include(self): + ml = make_local_path + # recursive-include + file_list = FileList() + self.make_files(['a.py', ml('d/b.py'), ml('d/c.txt'), ml('d/d/e.py')]) + + file_list.process_template_line('recursive-include d *.py') + file_list.sort() + assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')] + self.assertNoWarnings() + + file_list.process_template_line('recursive-include e *.py') + file_list.sort() + assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')] + self.assertWarnings() + + def test_recursive_exclude(self): + ml = make_local_path + # recursive-exclude + file_list = FileList() + file_list.files = ['a.py', ml('d/b.py'), ml('d/c.txt'), ml('d/d/e.py')] + + file_list.process_template_line('recursive-exclude d *.py') + file_list.sort() + assert file_list.files == ['a.py', ml('d/c.txt')] + self.assertNoWarnings() + + file_list.process_template_line('recursive-exclude e *.py') + file_list.sort() + assert file_list.files == ['a.py', ml('d/c.txt')] + self.assertWarnings() + + def test_graft(self): + ml = make_local_path + # graft + file_list = FileList() + self.make_files(['a.py', ml('d/b.py'), ml('d/d/e.py'), ml('f/f.py')]) + + file_list.process_template_line('graft d') + file_list.sort() + assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')] + self.assertNoWarnings() + + file_list.process_template_line('graft e') + file_list.sort() + assert file_list.files == [ml('d/b.py'), ml('d/d/e.py')] + self.assertWarnings() + + def test_prune(self): + ml = make_local_path + # prune + file_list = FileList() + file_list.files = 
['a.py', ml('d/b.py'), ml('d/d/e.py'), ml('f/f.py')] + + file_list.process_template_line('prune d') + file_list.sort() + assert file_list.files == ['a.py', ml('f/f.py')] + self.assertNoWarnings() + + file_list.process_template_line('prune e') + file_list.sort() + assert file_list.files == ['a.py', ml('f/f.py')] + self.assertWarnings() diff --git a/setuptools/tests/test_msvc.py b/setuptools/tests/test_msvc.py new file mode 100644 index 00000000..d1527bfa --- /dev/null +++ b/setuptools/tests/test_msvc.py @@ -0,0 +1,179 @@ +""" +Tests for msvc support module. +""" + +import os +import contextlib +import distutils.errors +import mock + +import pytest + +from . import contexts + +# importing only setuptools should apply the patch +__import__('setuptools') + +pytest.importorskip("distutils.msvc9compiler") + + +def mock_reg(hkcu=None, hklm=None): + """ + Return a mock for distutils.msvc9compiler.Reg, patched + to mock out the functions that access the registry. + """ + + _winreg = getattr(distutils.msvc9compiler, '_winreg', None) + winreg = getattr(distutils.msvc9compiler, 'winreg', _winreg) + + hives = { + winreg.HKEY_CURRENT_USER: hkcu or {}, + winreg.HKEY_LOCAL_MACHINE: hklm or {}, + } + + @classmethod + def read_keys(cls, base, key): + """Return list of registry keys.""" + hive = hives.get(base, {}) + return [ + k.rpartition('\\')[2] + for k in hive if k.startswith(key.lower()) + ] + + @classmethod + def read_values(cls, base, key): + """Return dict of registry keys and values.""" + hive = hives.get(base, {}) + return dict( + (k.rpartition('\\')[2], hive[k]) + for k in hive if k.startswith(key.lower()) + ) + + return mock.patch.multiple( + distutils.msvc9compiler.Reg, + read_keys=read_keys, read_values=read_values) + + +class TestModulePatch: + """ + Ensure that importing setuptools is sufficient to replace + the standard find_vcvarsall function with a version that + recognizes the "Visual C++ for Python" package. + """ + + key_32 = r'software\microsoft\devdiv\vcforpython\9.0\installdir' + key_64 = key_32.replace(r'\microsoft', r'\wow6432node\microsoft') + + def test_patched(self): + "Test the module is actually patched" + mod_name = distutils.msvc9compiler.find_vcvarsall.__module__ + assert mod_name == "setuptools.msvc", "find_vcvarsall unpatched" + + def test_no_registry_entries_means_nothing_found(self): + """ + No registry entries or environment variable should lead to an error + directing the user to download vcpython27. + """ + find_vcvarsall = distutils.msvc9compiler.find_vcvarsall + query_vcvarsall = distutils.msvc9compiler.query_vcvarsall + + with contexts.environment(VS90COMNTOOLS=None): + with mock_reg(): + assert find_vcvarsall(9.0) is None + + try: + query_vcvarsall(9.0) + except Exception as exc: + expected = distutils.errors.DistutilsPlatformError + assert isinstance(exc, expected) + assert 'aka.ms/vcpython27' in str(exc) + + @pytest.fixture + def user_preferred_setting(self): + """ + Set up environment with different install dirs for user vs. system + and yield the user_install_dir for the expected result. + """ + with self.mock_install_dir() as user_install_dir: + with self.mock_install_dir() as system_install_dir: + reg = mock_reg( + hkcu={ + self.key_32: user_install_dir, + }, + hklm={ + self.key_32: system_install_dir, + self.key_64: system_install_dir, + }, + ) + with reg: + yield user_install_dir + + def test_prefer_current_user(self, user_preferred_setting): + """ + Ensure user's settings are preferred. 
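+ (The per-user HKCU install dir should win over the machine-wide HKLM
+ entries when both are present.)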
+ """ + result = distutils.msvc9compiler.find_vcvarsall(9.0) + expected = os.path.join(user_preferred_setting, 'vcvarsall.bat') + assert expected == result + + @pytest.fixture + def local_machine_setting(self): + """ + Set up environment with only the system environment configured. + """ + with self.mock_install_dir() as system_install_dir: + reg = mock_reg( + hklm={ + self.key_32: system_install_dir, + }, + ) + with reg: + yield system_install_dir + + def test_local_machine_recognized(self, local_machine_setting): + """ + Ensure machine setting is honored if user settings are not present. + """ + result = distutils.msvc9compiler.find_vcvarsall(9.0) + expected = os.path.join(local_machine_setting, 'vcvarsall.bat') + assert expected == result + + @pytest.fixture + def x64_preferred_setting(self): + """ + Set up environment with 64-bit and 32-bit system settings configured + and yield the canonical location. + """ + with self.mock_install_dir() as x32_dir: + with self.mock_install_dir() as x64_dir: + reg = mock_reg( + hklm={ + # This *should* only exist on 32-bit machines + self.key_32: x32_dir, + # This *should* only exist on 64-bit machines + self.key_64: x64_dir, + }, + ) + with reg: + yield x32_dir + + def test_ensure_64_bit_preferred(self, x64_preferred_setting): + """ + Ensure 64-bit system key is preferred. + """ + result = distutils.msvc9compiler.find_vcvarsall(9.0) + expected = os.path.join(x64_preferred_setting, 'vcvarsall.bat') + assert expected == result + + @staticmethod + @contextlib.contextmanager + def mock_install_dir(): + """ + Make a mock install dir in a unique location so that tests can + distinguish which dir was detected in a given scenario. + """ + with contexts.tempdir() as result: + vcvarsall = os.path.join(result, 'vcvarsall.bat') + with open(vcvarsall, 'w'): + pass + yield result diff --git a/setuptools/tests/test_msvc14.py b/setuptools/tests/test_msvc14.py new file mode 100644 index 00000000..1aca12dd --- /dev/null +++ b/setuptools/tests/test_msvc14.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +""" +Tests for msvc support module (msvc14 unit tests). 
+""" + +import os +from distutils.errors import DistutilsPlatformError +import pytest +import sys + + +@pytest.mark.skipif(sys.platform != "win32", + reason="These tests are only for win32") +class TestMSVC14: + """Python 3.8 "distutils/tests/test_msvccompiler.py" backport""" + def test_no_compiler(self): + import setuptools.msvc as _msvccompiler + # makes sure query_vcvarsall raises + # a DistutilsPlatformError if the compiler + # is not found + + def _find_vcvarsall(plat_spec): + return None, None + + old_find_vcvarsall = _msvccompiler._msvc14_find_vcvarsall + _msvccompiler._msvc14_find_vcvarsall = _find_vcvarsall + try: + pytest.raises(DistutilsPlatformError, + _msvccompiler._msvc14_get_vc_env, + 'wont find this version') + finally: + _msvccompiler._msvc14_find_vcvarsall = old_find_vcvarsall + + def test_get_vc_env_unicode(self): + import setuptools.msvc as _msvccompiler + + test_var = 'ṰḖṤṪ┅ṼẨṜ' + test_value = '₃⁴₅' + + # Ensure we don't early exit from _get_vc_env + old_distutils_use_sdk = os.environ.pop('DISTUTILS_USE_SDK', None) + os.environ[test_var] = test_value + try: + env = _msvccompiler._msvc14_get_vc_env('x86') + assert test_var.lower() in env + assert test_value == env[test_var.lower()] + finally: + os.environ.pop(test_var) + if old_distutils_use_sdk: + os.environ['DISTUTILS_USE_SDK'] = old_distutils_use_sdk + + def test_get_vc2017(self): + import setuptools.msvc as _msvccompiler + + # This function cannot be mocked, so pass it if we find VS 2017 + # and mark it skipped if we do not. + version, path = _msvccompiler._msvc14_find_vc2017() + if os.environ.get('APPVEYOR_BUILD_WORKER_IMAGE', '') in [ + 'Visual Studio 2017' + ]: + assert version + if version: + assert version >= 15 + assert os.path.isdir(path) + else: + pytest.skip("VS 2017 is not installed") + + def test_get_vc2015(self): + import setuptools.msvc as _msvccompiler + + # This function cannot be mocked, so pass it if we find VS 2015 + # and mark it skipped if we do not. + version, path = _msvccompiler._msvc14_find_vc2015() + if os.environ.get('APPVEYOR_BUILD_WORKER_IMAGE', '') in [ + 'Visual Studio 2015', 'Visual Studio 2017' + ]: + assert version + if version: + assert version >= 14 + assert os.path.isdir(path) + else: + pytest.skip("VS 2015 is not installed") diff --git a/setuptools/tests/test_namespaces.py b/setuptools/tests/test_namespaces.py new file mode 100644 index 00000000..270f90c9 --- /dev/null +++ b/setuptools/tests/test_namespaces.py @@ -0,0 +1,134 @@ +import sys +import subprocess + +import pytest + +from . import namespaces +from setuptools.command import test + + +class TestNamespaces: + + @pytest.mark.skipif( + sys.version_info < (3, 5), + reason="Requires importlib.util.module_from_spec", + ) + def test_mixed_site_and_non_site(self, tmpdir): + """ + Installing two packages sharing the same namespace, one installed + to a site dir and the other installed just to a path on PYTHONPATH + should leave the namespace in tact and both packages reachable by + import. 
+ """ + pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA') + pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB') + site_packages = tmpdir / 'site-packages' + path_packages = tmpdir / 'path-packages' + targets = site_packages, path_packages + # use pip to install to the target directory + install_cmd = [ + sys.executable, + '-m', + 'pip.__main__', + 'install', + str(pkg_A), + '-t', str(site_packages), + ] + subprocess.check_call(install_cmd) + namespaces.make_site_dir(site_packages) + install_cmd = [ + sys.executable, + '-m', + 'pip.__main__', + 'install', + str(pkg_B), + '-t', str(path_packages), + ] + subprocess.check_call(install_cmd) + try_import = [ + sys.executable, + '-c', 'import myns.pkgA; import myns.pkgB', + ] + with test.test.paths_on_pythonpath(map(str, targets)): + subprocess.check_call(try_import) + + def test_pkg_resources_import(self, tmpdir): + """ + Ensure that a namespace package doesn't break on import + of pkg_resources. + """ + pkg = namespaces.build_namespace_package(tmpdir, 'myns.pkgA') + target = tmpdir / 'packages' + target.mkdir() + install_cmd = [ + sys.executable, + '-m', 'pip', + 'install', + '-t', str(target), + str(pkg), + ] + with test.test.paths_on_pythonpath([str(target)]): + subprocess.check_call(install_cmd) + namespaces.make_site_dir(target) + try_import = [ + sys.executable, + '-c', 'import pkg_resources', + ] + with test.test.paths_on_pythonpath([str(target)]): + subprocess.check_call(try_import) + + def test_namespace_package_installed_and_cwd(self, tmpdir): + """ + Installing a namespace packages but also having it in the current + working directory, only one version should take precedence. + """ + pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA') + target = tmpdir / 'packages' + # use pip to install to the target directory + install_cmd = [ + sys.executable, + '-m', + 'pip.__main__', + 'install', + str(pkg_A), + '-t', str(target), + ] + subprocess.check_call(install_cmd) + namespaces.make_site_dir(target) + + # ensure that package imports and pkg_resources imports + pkg_resources_imp = [ + sys.executable, + '-c', 'import pkg_resources; import myns.pkgA', + ] + with test.test.paths_on_pythonpath([str(target)]): + subprocess.check_call(pkg_resources_imp, cwd=str(pkg_A)) + + def test_packages_in_the_same_namespace_installed_and_cwd(self, tmpdir): + """ + Installing one namespace package and also have another in the same + namespace in the current working directory, both of them must be + importable. 
+ """ + pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA') + pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB') + target = tmpdir / 'packages' + # use pip to install to the target directory + install_cmd = [ + sys.executable, + '-m', + 'pip.__main__', + 'install', + str(pkg_A), + '-t', str(target), + ] + subprocess.check_call(install_cmd) + namespaces.make_site_dir(target) + + # ensure that all packages import and pkg_resources imports + pkg_resources_imp = [ + sys.executable, + '-c', 'import pkg_resources; import myns.pkgA; import myns.pkgB', + ] + with test.test.paths_on_pythonpath([str(target)]): + subprocess.check_call(pkg_resources_imp, cwd=str(pkg_B)) diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py new file mode 100644 index 00000000..8e9435ef --- /dev/null +++ b/setuptools/tests/test_packageindex.py @@ -0,0 +1,310 @@ +import sys +import os +import distutils.errors +import platform +import urllib.request +import urllib.error +import http.client + +import mock +import pytest + +import setuptools.package_index +from .textwrap import DALS + + +class TestPackageIndex: + def test_regex(self): + hash_url = 'http://other_url?:action=show_md5&' + hash_url += 'digest=0123456789abcdef0123456789abcdef' + doc = """ + <a href="http://some_url">Name</a> + (<a title="MD5 hash" + href="{hash_url}">md5</a>) + """.lstrip().format(**locals()) + assert setuptools.package_index.PYPI_MD5.match(doc) + + def test_bad_url_bad_port(self): + index = setuptools.package_index.PackageIndex() + url = 'http://127.0.0.1:0/nonesuch/test_package_index' + try: + v = index.open_url(url) + except Exception as v: + assert url in str(v) + else: + assert isinstance(v, urllib.error.HTTPError) + + def test_bad_url_typo(self): + # issue 16 + # easy_install inquant.contentmirror.plone breaks because of a typo + # in its home URL + index = setuptools.package_index.PackageIndex( + hosts=('www.example.com',) + ) + + url = ( + 'url:%20https://svn.plone.org/svn' + '/collective/inquant.contentmirror.plone/trunk' + ) + try: + v = index.open_url(url) + except Exception as v: + assert url in str(v) + else: + assert isinstance(v, urllib.error.HTTPError) + + def test_bad_url_bad_status_line(self): + index = setuptools.package_index.PackageIndex( + hosts=('www.example.com',) + ) + + def _urlopen(*args): + raise http.client.BadStatusLine('line') + + index.opener = _urlopen + url = 'http://example.com' + try: + index.open_url(url) + except Exception as exc: + assert 'line' in str(exc) + else: + raise AssertionError('Should have raise here!') + + def test_bad_url_double_scheme(self): + """ + A bad URL with a double scheme should raise a DistutilsError. 
+ """ + index = setuptools.package_index.PackageIndex( + hosts=('www.example.com',) + ) + + # issue 20 + url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk' + try: + index.open_url(url) + except distutils.errors.DistutilsError as error: + msg = str(error) + assert ( + 'nonnumeric port' in msg + or 'getaddrinfo failed' in msg + or 'Name or service not known' in msg + ) + return + raise RuntimeError("Did not raise") + + def test_bad_url_screwy_href(self): + index = setuptools.package_index.PackageIndex( + hosts=('www.example.com',) + ) + + # issue #160 + if sys.version_info[0] == 2 and sys.version_info[1] == 7: + # this should not fail + url = 'http://example.com' + page = ('<a href="http://www.famfamfam.com](' + 'http://www.famfamfam.com/">') + index.process_index(url, page) + + def test_url_ok(self): + index = setuptools.package_index.PackageIndex( + hosts=('www.example.com',) + ) + url = 'file:///tmp/test_package_index' + assert index.url_ok(url, True) + + def test_parse_bdist_wininst(self): + parse = setuptools.package_index.parse_bdist_wininst + + actual = parse('reportlab-2.5.win32-py2.4.exe') + expected = 'reportlab-2.5', '2.4', 'win32' + assert actual == expected + + actual = parse('reportlab-2.5.win32.exe') + expected = 'reportlab-2.5', None, 'win32' + assert actual == expected + + actual = parse('reportlab-2.5.win-amd64-py2.7.exe') + expected = 'reportlab-2.5', '2.7', 'win-amd64' + assert actual == expected + + actual = parse('reportlab-2.5.win-amd64.exe') + expected = 'reportlab-2.5', None, 'win-amd64' + assert actual == expected + + def test__vcs_split_rev_from_url(self): + """ + Test the basic usage of _vcs_split_rev_from_url + """ + vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url + url, rev = vsrfu('https://example.com/bar@2995') + assert url == 'https://example.com/bar' + assert rev == '2995' + + def test_local_index(self, tmpdir): + """ + local_open should be able to read an index from the file system. 
+ """ + index_file = tmpdir / 'index.html' + with index_file.open('w') as f: + f.write('<div>content</div>') + url = 'file:' + urllib.request.pathname2url(str(tmpdir)) + '/' + res = setuptools.package_index.local_open(url) + assert 'content' in res.read() + + def test_egg_fragment(self): + """ + EGG fragments must comply to PEP 440 + """ + epoch = [ + '', + '1!', + ] + releases = [ + '0', + '0.0', + '0.0.0', + ] + pre = [ + 'a0', + 'b0', + 'rc0', + ] + post = [ + '.post0' + ] + dev = [ + '.dev0', + ] + local = [ + ('', ''), + ('+ubuntu.0', '+ubuntu.0'), + ('+ubuntu-0', '+ubuntu.0'), + ('+ubuntu_0', '+ubuntu.0'), + ] + versions = [ + [''.join([e, r, p, loc]) for loc in locs] + for e in epoch + for r in releases + for p in sum([pre, post, dev], ['']) + for locs in local] + for v, vc in versions: + dists = list(setuptools.package_index.distros_for_url( + 'http://example.com/example.zip#egg=example-' + v)) + assert dists[0].version == '' + assert dists[1].version == vc + + def test_download_git_with_rev(self, tmpdir): + url = 'git+https://github.example/group/project@master#egg=foo' + index = setuptools.package_index.PackageIndex() + + with mock.patch("os.system") as os_system_mock: + result = index.download(url, str(tmpdir)) + + os_system_mock.assert_called() + + expected_dir = str(tmpdir / 'project@master') + expected = ( + 'git clone --quiet ' + 'https://github.example/group/project {expected_dir}' + ).format(**locals()) + first_call_args = os_system_mock.call_args_list[0][0] + assert first_call_args == (expected,) + + tmpl = 'git -C {expected_dir} checkout --quiet master' + expected = tmpl.format(**locals()) + assert os_system_mock.call_args_list[1][0] == (expected,) + assert result == expected_dir + + def test_download_git_no_rev(self, tmpdir): + url = 'git+https://github.example/group/project#egg=foo' + index = setuptools.package_index.PackageIndex() + + with mock.patch("os.system") as os_system_mock: + result = index.download(url, str(tmpdir)) + + os_system_mock.assert_called() + + expected_dir = str(tmpdir / 'project') + expected = ( + 'git clone --quiet ' + 'https://github.example/group/project {expected_dir}' + ).format(**locals()) + os_system_mock.assert_called_once_with(expected) + + def test_download_svn(self, tmpdir): + url = 'svn+https://svn.example/project#egg=foo' + index = setuptools.package_index.PackageIndex() + + with pytest.warns(UserWarning): + with mock.patch("os.system") as os_system_mock: + result = index.download(url, str(tmpdir)) + + os_system_mock.assert_called() + + expected_dir = str(tmpdir / 'project') + expected = ( + 'svn checkout -q ' + 'svn+https://svn.example/project {expected_dir}' + ).format(**locals()) + os_system_mock.assert_called_once_with(expected) + + +class TestContentCheckers: + def test_md5(self): + checker = setuptools.package_index.HashChecker.from_url( + 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') + checker.feed('You should probably not be using MD5'.encode('ascii')) + assert checker.hash.hexdigest() == 'f12895fdffbd45007040d2e44df98478' + assert checker.is_valid() + + def test_other_fragment(self): + "Content checks should succeed silently if no hash is present" + checker = setuptools.package_index.HashChecker.from_url( + 'http://foo/bar#something%20completely%20different') + checker.feed('anything'.encode('ascii')) + assert checker.is_valid() + + def test_blank_md5(self): + "Content checks should succeed if a hash is empty" + checker = setuptools.package_index.HashChecker.from_url( + 'http://foo/bar#md5=') + 
checker.feed('anything'.encode('ascii')) + assert checker.is_valid() + + def test_get_hash_name_md5(self): + checker = setuptools.package_index.HashChecker.from_url( + 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') + assert checker.hash_name == 'md5' + + def test_report(self): + checker = setuptools.package_index.HashChecker.from_url( + 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') + rep = checker.report(lambda x: x, 'My message about %s') + assert rep == 'My message about md5' + + +@pytest.fixture +def temp_home(tmpdir, monkeypatch): + key = ( + 'USERPROFILE' + if platform.system() == 'Windows' and sys.version_info > (3, 8) else + 'HOME' + ) + + monkeypatch.setitem(os.environ, key, str(tmpdir)) + return tmpdir + + +class TestPyPIConfig: + def test_percent_in_password(self, temp_home): + pypirc = temp_home / '.pypirc' + pypirc.write(DALS(""" + [pypi] + repository=https://pypi.org + username=jaraco + password=pity% + """)) + cfg = setuptools.package_index.PyPIConfig() + cred = cfg.creds_by_repository['https://pypi.org'] + assert cred.username == 'jaraco' + assert cred.password == 'pity%' diff --git a/setuptools/tests/test_register.py b/setuptools/tests/test_register.py new file mode 100644 index 00000000..98605806 --- /dev/null +++ b/setuptools/tests/test_register.py @@ -0,0 +1,22 @@ +from setuptools.command.register import register +from setuptools.dist import Distribution +from setuptools.errors import RemovedCommandError + +try: + from unittest import mock +except ImportError: + import mock + +import pytest + + +class TestRegister: + def test_register_exception(self): + """Ensure that the register command has been properly removed.""" + dist = Distribution() + dist.dist_files = [(mock.Mock(), mock.Mock(), mock.Mock())] + + cmd = register(dist) + + with pytest.raises(RemovedCommandError): + cmd.run() diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py new file mode 100644 index 00000000..99398cdb --- /dev/null +++ b/setuptools/tests/test_sandbox.py @@ -0,0 +1,134 @@ +"""develop tests +""" +import os +import types + +import pytest + +import pkg_resources +import setuptools.sandbox + + +class TestSandbox: + def test_devnull(self, tmpdir): + with setuptools.sandbox.DirectorySandbox(str(tmpdir)): + self._file_writer(os.devnull) + + @staticmethod + def _file_writer(path): + def do_write(): + with open(path, 'w') as f: + f.write('xxx') + + return do_write + + def test_setup_py_with_BOM(self): + """ + It should be possible to execute a setup.py with a Byte Order Mark + """ + target = pkg_resources.resource_filename( + __name__, + 'script-with-bom.py') + namespace = types.ModuleType('namespace') + setuptools.sandbox._execfile(target, vars(namespace)) + assert namespace.result == 'passed' + + def test_setup_py_with_CRLF(self, tmpdir): + setup_py = tmpdir / 'setup.py' + with setup_py.open('wb') as stream: + stream.write(b'"degenerate script"\r\n') + setuptools.sandbox._execfile(str(setup_py), globals()) + + +class TestExceptionSaver: + def test_exception_trapped(self): + with setuptools.sandbox.ExceptionSaver(): + raise ValueError("details") + + def test_exception_resumed(self): + with setuptools.sandbox.ExceptionSaver() as saved_exc: + raise ValueError("details") + + with pytest.raises(ValueError) as caught: + saved_exc.resume() + + assert isinstance(caught.value, ValueError) + assert str(caught.value) == 'details' + + def test_exception_reconstructed(self): + orig_exc = ValueError("details") + + with setuptools.sandbox.ExceptionSaver() as saved_exc: + 
raise orig_exc + + with pytest.raises(ValueError) as caught: + saved_exc.resume() + + assert isinstance(caught.value, ValueError) + assert caught.value is not orig_exc + + def test_no_exception_passes_quietly(self): + with setuptools.sandbox.ExceptionSaver() as saved_exc: + pass + + saved_exc.resume() + + def test_unpickleable_exception(self): + class CantPickleThis(Exception): + "This Exception is unpickleable because it's not in globals" + def __repr__(self): + return 'CantPickleThis%r' % (self.args,) + + with setuptools.sandbox.ExceptionSaver() as saved_exc: + raise CantPickleThis('detail') + + with pytest.raises(setuptools.sandbox.UnpickleableException) as caught: + saved_exc.resume() + + assert str(caught.value) == "CantPickleThis('detail',)" + + def test_unpickleable_exception_when_hiding_setuptools(self): + """ + As revealed in #440, an infinite recursion can occur if an unpickleable + exception while setuptools is hidden. Ensure this doesn't happen. + """ + + class ExceptionUnderTest(Exception): + """ + An unpickleable exception (not in globals). + """ + + with pytest.raises(setuptools.sandbox.UnpickleableException) as caught: + with setuptools.sandbox.save_modules(): + setuptools.sandbox.hide_setuptools() + raise ExceptionUnderTest() + + msg, = caught.value.args + assert msg == 'ExceptionUnderTest()' + + def test_sandbox_violation_raised_hiding_setuptools(self, tmpdir): + """ + When in a sandbox with setuptools hidden, a SandboxViolation + should reflect a proper exception and not be wrapped in + an UnpickleableException. + """ + + def write_file(): + "Trigger a SandboxViolation by writing outside the sandbox" + with open('/etc/foo', 'w'): + pass + + with pytest.raises(setuptools.sandbox.SandboxViolation) as caught: + with setuptools.sandbox.save_modules(): + setuptools.sandbox.hide_setuptools() + with setuptools.sandbox.DirectorySandbox(str(tmpdir)): + write_file() + + cmd, args, kwargs = caught.value.args + assert cmd == 'open' + assert args == ('/etc/foo', 'w') + assert kwargs == {} + + msg = str(caught.value) + assert 'open' in msg + assert "('/etc/foo', 'w')" in msg diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py new file mode 100644 index 00000000..66f46ad0 --- /dev/null +++ b/setuptools/tests/test_sdist.py @@ -0,0 +1,535 @@ +"""sdist tests""" + +import os +import sys +import tempfile +import unicodedata +import contextlib +import io +from unittest import mock + +import pytest + +import pkg_resources +from setuptools import SetuptoolsDeprecationWarning +from setuptools.command.sdist import sdist +from setuptools.command.egg_info import manifest_maker +from setuptools.dist import Distribution +from setuptools.tests import fail_on_ascii +from .text import Filenames + + +SETUP_ATTRS = { + 'name': 'sdist_test', + 'version': '0.0', + 'packages': ['sdist_test'], + 'package_data': {'sdist_test': ['*.txt']}, + 'data_files': [("data", [os.path.join("d", "e.dat")])], +} + +SETUP_PY = """\ +from setuptools import setup + +setup(**%r) +""" % SETUP_ATTRS + + +@contextlib.contextmanager +def quiet(): + old_stdout, old_stderr = sys.stdout, sys.stderr + sys.stdout, sys.stderr = io.StringIO(), io.StringIO() + try: + yield + finally: + sys.stdout, sys.stderr = old_stdout, old_stderr + + +# Convert to POSIX path +def posix(path): + if not isinstance(path, str): + return path.replace(os.sep.encode('ascii'), b'/') + else: + return path.replace(os.sep, '/') + + +# HFS Plus uses decomposed UTF-8 +def decompose(path): + if isinstance(path, str): + return 
unicodedata.normalize('NFD', path) + try: + path = path.decode('utf-8') + path = unicodedata.normalize('NFD', path) + path = path.encode('utf-8') + except UnicodeError: + pass # Not UTF-8 + return path + + +def read_all_bytes(filename): + with io.open(filename, 'rb') as fp: + return fp.read() + + +def latin1_fail(): + try: + desc, filename = tempfile.mkstemp(suffix=Filenames.latin_1) + os.close(desc) + os.remove(filename) + except Exception: + return True + + +fail_on_latin1_encoded_filenames = pytest.mark.xfail( + latin1_fail(), + reason="System does not support latin-1 filenames", +) + + +def touch(path): + path.write_text('', encoding='utf-8') + + +class TestSdistTest: + @pytest.fixture(autouse=True) + def source_dir(self, tmpdir): + (tmpdir / 'setup.py').write_text(SETUP_PY, encoding='utf-8') + + # Set up the rest of the test package + test_pkg = tmpdir / 'sdist_test' + test_pkg.mkdir() + data_folder = tmpdir / 'd' + data_folder.mkdir() + # *.rst was not included in package_data, so c.rst should not be + # automatically added to the manifest when not under version control + for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']: + touch(test_pkg / fname) + touch(data_folder / 'e.dat') + + with tmpdir.as_cwd(): + yield + + def assert_package_data_in_manifest(self, cmd): + manifest = cmd.filelist.files + assert os.path.join('sdist_test', 'a.txt') in manifest + assert os.path.join('sdist_test', 'b.txt') in manifest + assert os.path.join('sdist_test', 'c.rst') not in manifest + assert os.path.join('d', 'e.dat') in manifest + + def test_package_data_in_sdist(self): + """Regression test for pull request #4: ensures that files listed in + package_data are included in the manifest even if they're not added to + version control. + """ + + dist = Distribution(SETUP_ATTRS) + dist.script_name = 'setup.py' + cmd = sdist(dist) + cmd.ensure_finalized() + + with quiet(): + cmd.run() + + self.assert_package_data_in_manifest(cmd) + + def test_package_data_and_include_package_data_in_sdist(self): + """ + Ensure package_data and include_package_data work + together. 
+ """ + setup_attrs = {**SETUP_ATTRS, 'include_package_data': True} + assert setup_attrs['package_data'] + + dist = Distribution(setup_attrs) + dist.script_name = 'setup.py' + cmd = sdist(dist) + cmd.ensure_finalized() + + with quiet(): + cmd.run() + + self.assert_package_data_in_manifest(cmd) + + def test_custom_build_py(self): + """ + Ensure projects defining custom build_py don't break + when creating sdists (issue #2849) + """ + from distutils.command.build_py import build_py as OrigBuildPy + + using_custom_command_guard = mock.Mock() + + class CustomBuildPy(OrigBuildPy): + """ + Some projects have custom commands inheriting from `distutils` + """ + + def get_data_files(self): + using_custom_command_guard() + return super().get_data_files() + + setup_attrs = {**SETUP_ATTRS, 'include_package_data': True} + assert setup_attrs['package_data'] + + dist = Distribution(setup_attrs) + dist.script_name = 'setup.py' + cmd = sdist(dist) + cmd.ensure_finalized() + + # Make sure we use the custom command + cmd.cmdclass = {'build_py': CustomBuildPy} + cmd.distribution.cmdclass = {'build_py': CustomBuildPy} + assert cmd.distribution.get_command_class('build_py') == CustomBuildPy + + msg = "setuptools instead of distutils" + with quiet(), pytest.warns(SetuptoolsDeprecationWarning, match=msg): + cmd.run() + + using_custom_command_guard.assert_called() + self.assert_package_data_in_manifest(cmd) + + def test_setup_py_exists(self): + dist = Distribution(SETUP_ATTRS) + dist.script_name = 'foo.py' + cmd = sdist(dist) + cmd.ensure_finalized() + + with quiet(): + cmd.run() + + manifest = cmd.filelist.files + assert 'setup.py' in manifest + + def test_setup_py_missing(self): + dist = Distribution(SETUP_ATTRS) + dist.script_name = 'foo.py' + cmd = sdist(dist) + cmd.ensure_finalized() + + if os.path.exists("setup.py"): + os.remove("setup.py") + with quiet(): + cmd.run() + + manifest = cmd.filelist.files + assert 'setup.py' not in manifest + + def test_setup_py_excluded(self): + with open("MANIFEST.in", "w") as manifest_file: + manifest_file.write("exclude setup.py") + + dist = Distribution(SETUP_ATTRS) + dist.script_name = 'foo.py' + cmd = sdist(dist) + cmd.ensure_finalized() + + with quiet(): + cmd.run() + + manifest = cmd.filelist.files + assert 'setup.py' not in manifest + + def test_defaults_case_sensitivity(self, tmpdir): + """ + Make sure default files (README.*, etc.) are added in a case-sensitive + way to avoid problems with packages built on Windows. + """ + + touch(tmpdir / 'readme.rst') + touch(tmpdir / 'SETUP.cfg') + + dist = Distribution(SETUP_ATTRS) + # the extension deliberately capitalized for this test + # to make sure the actual filename (not capitalized) gets added + # to the manifest + dist.script_name = 'setup.PY' + cmd = sdist(dist) + cmd.ensure_finalized() + + with quiet(): + cmd.run() + + # lowercase all names so we can test in a + # case-insensitive way to make sure the files + # are not included. + manifest = map(lambda x: x.lower(), cmd.filelist.files) + assert 'readme.rst' not in manifest, manifest + assert 'setup.py' not in manifest, manifest + assert 'setup.cfg' not in manifest, manifest + + @fail_on_ascii + def test_manifest_is_written_with_utf8_encoding(self): + # Test for #303. 
+ dist = Distribution(SETUP_ATTRS) + dist.script_name = 'setup.py' + mm = manifest_maker(dist) + mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') + os.mkdir('sdist_test.egg-info') + + # UTF-8 filename + filename = os.path.join('sdist_test', 'smörbröd.py') + + # Must create the file or it will get stripped. + open(filename, 'w').close() + + # Add UTF-8 filename and write manifest + with quiet(): + mm.run() + mm.filelist.append(filename) + mm.write_manifest() + + contents = read_all_bytes(mm.manifest) + + # The manifest should be UTF-8 encoded + u_contents = contents.decode('UTF-8') + + # The manifest should contain the UTF-8 filename + assert posix(filename) in u_contents + + @fail_on_ascii + def test_write_manifest_allows_utf8_filenames(self): + # Test for #303. + dist = Distribution(SETUP_ATTRS) + dist.script_name = 'setup.py' + mm = manifest_maker(dist) + mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') + os.mkdir('sdist_test.egg-info') + + filename = os.path.join(b'sdist_test', Filenames.utf_8) + + # Must touch the file or risk removal + open(filename, "w").close() + + # Add filename and write manifest + with quiet(): + mm.run() + u_filename = filename.decode('utf-8') + mm.filelist.files.append(u_filename) + # Re-write manifest + mm.write_manifest() + + contents = read_all_bytes(mm.manifest) + + # The manifest should be UTF-8 encoded + contents.decode('UTF-8') + + # The manifest should contain the UTF-8 filename + assert posix(filename) in contents + + # The filelist should have been updated as well + assert u_filename in mm.filelist.files + + def test_write_manifest_skips_non_utf8_filenames(self): + """ + Files that cannot be encoded to UTF-8 (specifically, those that + weren't originally successfully decoded and have surrogate + escapes) should be omitted from the manifest. + See https://bitbucket.org/tarek/distribute/issue/303 for history. + """ + dist = Distribution(SETUP_ATTRS) + dist.script_name = 'setup.py' + mm = manifest_maker(dist) + mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') + os.mkdir('sdist_test.egg-info') + + # Latin-1 filename + filename = os.path.join(b'sdist_test', Filenames.latin_1) + + # Add filename with surrogates and write manifest + with quiet(): + mm.run() + u_filename = filename.decode('utf-8', 'surrogateescape') + mm.filelist.append(u_filename) + # Re-write manifest + mm.write_manifest() + + contents = read_all_bytes(mm.manifest) + + # The manifest should be UTF-8 encoded + contents.decode('UTF-8') + + # The Latin-1 filename should have been skipped + assert posix(filename) not in contents + + # The filelist should have been updated as well + assert u_filename not in mm.filelist.files + + @fail_on_ascii + def test_manifest_is_read_with_utf8_encoding(self): + # Test for #303. 
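+ # Here the manifest is written first, a UTF-8 encoded filename is then + # appended to SOURCES.txt as raw bytes, and read_manifest is expected to + # decode it back into the filelist.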
+ dist = Distribution(SETUP_ATTRS) + dist.script_name = 'setup.py' + cmd = sdist(dist) + cmd.ensure_finalized() + + # Create manifest + with quiet(): + cmd.run() + + # Add UTF-8 filename to manifest + filename = os.path.join(b'sdist_test', Filenames.utf_8) + cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') + manifest = open(cmd.manifest, 'ab') + manifest.write(b'\n' + filename) + manifest.close() + + # The file must exist to be included in the filelist + open(filename, 'w').close() + + # Re-read manifest + cmd.filelist.files = [] + with quiet(): + cmd.read_manifest() + + # The filelist should contain the UTF-8 filename + filename = filename.decode('utf-8') + assert filename in cmd.filelist.files + + @fail_on_latin1_encoded_filenames + def test_read_manifest_skips_non_utf8_filenames(self): + # Test for #303. + dist = Distribution(SETUP_ATTRS) + dist.script_name = 'setup.py' + cmd = sdist(dist) + cmd.ensure_finalized() + + # Create manifest + with quiet(): + cmd.run() + + # Add Latin-1 filename to manifest + filename = os.path.join(b'sdist_test', Filenames.latin_1) + cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') + manifest = open(cmd.manifest, 'ab') + manifest.write(b'\n' + filename) + manifest.close() + + # The file must exist to be included in the filelist + open(filename, 'w').close() + + # Re-read manifest + cmd.filelist.files = [] + with quiet(): + cmd.read_manifest() + + # The Latin-1 filename should have been skipped + filename = filename.decode('latin-1') + assert filename not in cmd.filelist.files + + @fail_on_ascii + @fail_on_latin1_encoded_filenames + def test_sdist_with_utf8_encoded_filename(self): + # Test for #303. + dist = Distribution(self.make_strings(SETUP_ATTRS)) + dist.script_name = 'setup.py' + cmd = sdist(dist) + cmd.ensure_finalized() + + filename = os.path.join(b'sdist_test', Filenames.utf_8) + open(filename, 'w').close() + + with quiet(): + cmd.run() + + if sys.platform == 'darwin': + filename = decompose(filename) + + fs_enc = sys.getfilesystemencoding() + + if sys.platform == 'win32': + if fs_enc == 'cp1252': + # Python mangles the UTF-8 filename + filename = filename.decode('cp1252') + assert filename in cmd.filelist.files + else: + filename = filename.decode('mbcs') + assert filename in cmd.filelist.files + else: + filename = filename.decode('utf-8') + assert filename in cmd.filelist.files + + @classmethod + def make_strings(cls, item): + if isinstance(item, dict): + return { + key: cls.make_strings(value) for key, value in item.items()} + if isinstance(item, list): + return list(map(cls.make_strings, item)) + return str(item) + + @fail_on_latin1_encoded_filenames + def test_sdist_with_latin1_encoded_filename(self): + # Test for #303. 
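+ # A file whose name is latin-1 encoded (and not valid UTF-8) is created + # on disk; whether it appears in the filelist depends on the platform's + # filesystem encoding, which the branches below account for.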
+ dist = Distribution(self.make_strings(SETUP_ATTRS)) + dist.script_name = 'setup.py' + cmd = sdist(dist) + cmd.ensure_finalized() + + # Latin-1 filename + filename = os.path.join(b'sdist_test', Filenames.latin_1) + open(filename, 'w').close() + assert os.path.isfile(filename) + + with quiet(): + cmd.run() + + # not all windows systems have a default FS encoding of cp1252 + if sys.platform == 'win32': + # Latin-1 is similar to Windows-1252 however + # on mbcs filesys it is not in latin-1 encoding + fs_enc = sys.getfilesystemencoding() + if fs_enc != 'mbcs': + fs_enc = 'latin-1' + filename = filename.decode(fs_enc) + + assert filename in cmd.filelist.files + else: + # The Latin-1 filename should have been skipped + filename = filename.decode('latin-1') + assert filename not in cmd.filelist.files + + def test_pyproject_toml_in_sdist(self, tmpdir): + """ + Check that pyproject.toml is included in the source distribution if present + """ + touch(tmpdir / 'pyproject.toml') + dist = Distribution(SETUP_ATTRS) + dist.script_name = 'setup.py' + cmd = sdist(dist) + cmd.ensure_finalized() + with quiet(): + cmd.run() + manifest = cmd.filelist.files + assert 'pyproject.toml' in manifest + + def test_pyproject_toml_excluded(self, tmpdir): + """ + Check that pyproject.toml can be excluded even if present + """ + touch(tmpdir / 'pyproject.toml') + with open('MANIFEST.in', 'w') as mts: + print('exclude pyproject.toml', file=mts) + dist = Distribution(SETUP_ATTRS) + dist.script_name = 'setup.py' + cmd = sdist(dist) + cmd.ensure_finalized() + with quiet(): + cmd.run() + manifest = cmd.filelist.files + assert 'pyproject.toml' not in manifest + + +def test_default_revctrl(): + """ + When _default_revctrl was removed from the `setuptools.command.sdist` + module in 10.0, it broke some systems which keep an old install of + setuptools (Distribute) around. Those old versions require that the + setuptools package continue to implement that interface, so this + function provides that interface, stubbed. See #320 for details. + + This interface must be maintained until Ubuntu 12.04 is no longer + supported (by Setuptools). + """ + ep_def = 'svn_cvs = setuptools.command.sdist:_default_revctrl' + ep = pkg_resources.EntryPoint.parse(ep_def) + res = ep.resolve() + assert hasattr(res, '__iter__') diff --git a/setuptools/tests/test_setopt.py b/setuptools/tests/test_setopt.py new file mode 100644 index 00000000..36008632 --- /dev/null +++ b/setuptools/tests/test_setopt.py @@ -0,0 +1,41 @@ +import io +import configparser + +from setuptools.command import setopt + + +class TestEdit: + @staticmethod + def parse_config(filename): + parser = configparser.ConfigParser() + with io.open(filename, encoding='utf-8') as reader: + parser.read_file(reader) + return parser + + @staticmethod + def write_text(file, content): + with io.open(file, 'wb') as strm: + strm.write(content.encode('utf-8')) + + def test_utf8_encoding_retained(self, tmpdir): + """ + When editing a file, non-ASCII characters encoded in + UTF-8 should be retained. + """ + config = tmpdir.join('setup.cfg') + self.write_text(str(config), '[names]\njaraco=джарако') + setopt.edit_config(str(config), dict(names=dict(other='yes'))) + parser = self.parse_config(str(config)) + assert parser.get('names', 'jaraco') == 'джарако' + assert parser.get('names', 'other') == 'yes' + + def test_case_retained(self, tmpdir): + """ + When editing a file, case of keys should be retained.
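+ (ConfigParser lower-cases option names by default via optionxform; + edit_config is expected to preserve the case found in the file, hence + the mixed-case keys checked below.)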
+ """ + config = tmpdir.join('setup.cfg') + self.write_text(str(config), '[names]\nFoO=bAr') + setopt.edit_config(str(config), dict(names=dict(oTher='yes'))) + actual = config.read_text(encoding='ascii') + assert 'FoO' in actual + assert 'oTher' in actual diff --git a/setuptools/tests/test_setuptools.py b/setuptools/tests/test_setuptools.py new file mode 100644 index 00000000..42f8e18b --- /dev/null +++ b/setuptools/tests/test_setuptools.py @@ -0,0 +1,295 @@ +"""Tests for the 'setuptools' package""" + +import sys +import os +import distutils.core +import distutils.cmd +from distutils.errors import DistutilsOptionError +from distutils.errors import DistutilsSetupError +from distutils.core import Extension +from distutils.version import LooseVersion + +import pytest + +import setuptools +import setuptools.dist +import setuptools.depends as dep +from setuptools.depends import Require + + +def makeSetup(**args): + """Return distribution from 'setup(**args)', without executing commands""" + + distutils.core._setup_stop_after = "commandline" + + # Don't let system command line leak into tests! + args.setdefault('script_args', ['install']) + + try: + return setuptools.setup(**args) + finally: + distutils.core._setup_stop_after = None + + +needs_bytecode = pytest.mark.skipif( + not hasattr(dep, 'get_module_constant'), + reason="bytecode support not available", +) + + +class TestDepends: + def testExtractConst(self): + if not hasattr(dep, 'extract_constant'): + # skip on non-bytecode platforms + return + + def f1(): + global x, y, z + x = "test" + y = z + + fc = f1.__code__ + + # unrecognized name + assert dep.extract_constant(fc, 'q', -1) is None + + # constant assigned + dep.extract_constant(fc, 'x', -1) == "test" + + # expression assigned + dep.extract_constant(fc, 'y', -1) == -1 + + # recognized name, not assigned + dep.extract_constant(fc, 'z', -1) is None + + def testFindModule(self): + with pytest.raises(ImportError): + dep.find_module('no-such.-thing') + with pytest.raises(ImportError): + dep.find_module('setuptools.non-existent') + f, p, i = dep.find_module('setuptools.tests') + f.close() + + @needs_bytecode + def testModuleExtract(self): + from json import __version__ + assert dep.get_module_constant('json', '__version__') == __version__ + assert dep.get_module_constant('sys', 'version') == sys.version + assert dep.get_module_constant( + 'setuptools.tests.test_setuptools', '__doc__') == __doc__ + + @needs_bytecode + def testRequire(self): + req = Require('Json', '1.0.3', 'json') + + assert req.name == 'Json' + assert req.module == 'json' + assert req.requested_version == '1.0.3' + assert req.attribute == '__version__' + assert req.full_name() == 'Json-1.0.3' + + from json import __version__ + assert req.get_version() == __version__ + assert req.version_ok('1.0.9') + assert not req.version_ok('0.9.1') + assert not req.version_ok('unknown') + + assert req.is_present() + assert req.is_current() + + req = Require('Json 3000', '03000', 'json', format=LooseVersion) + assert req.is_present() + assert not req.is_current() + assert not req.version_ok('unknown') + + req = Require('Do-what-I-mean', '1.0', 'd-w-i-m') + assert not req.is_present() + assert not req.is_current() + + @needs_bytecode + def test_require_present(self): + # In #1896, this test was failing for months with the only + # complaint coming from test runners (not end users). + # TODO: Evaluate if this code is needed at all. 
+ req = Require('Tests', None, 'tests', homepage="http://example.com") + assert req.format is None + assert req.attribute is None + assert req.requested_version is None + assert req.full_name() == 'Tests' + assert req.homepage == 'http://example.com' + + from setuptools.tests import __path__ + paths = [os.path.dirname(p) for p in __path__] + assert req.is_present(paths) + assert req.is_current(paths) + + +class TestDistro: + def setup_method(self, method): + self.e1 = Extension('bar.ext', ['bar.c']) + self.e2 = Extension('c.y', ['y.c']) + + self.dist = makeSetup( + packages=['a', 'a.b', 'a.b.c', 'b', 'c'], + py_modules=['b.d', 'x'], + ext_modules=(self.e1, self.e2), + package_dir={}, + ) + + def testDistroType(self): + assert isinstance(self.dist, setuptools.dist.Distribution) + + def testExcludePackage(self): + self.dist.exclude_package('a') + assert self.dist.packages == ['b', 'c'] + + self.dist.exclude_package('b') + assert self.dist.packages == ['c'] + assert self.dist.py_modules == ['x'] + assert self.dist.ext_modules == [self.e1, self.e2] + + self.dist.exclude_package('c') + assert self.dist.packages == [] + assert self.dist.py_modules == ['x'] + assert self.dist.ext_modules == [self.e1] + + # test removals from unspecified options + makeSetup().exclude_package('x') + + def testIncludeExclude(self): + # remove an extension + self.dist.exclude(ext_modules=[self.e1]) + assert self.dist.ext_modules == [self.e2] + + # add it back in + self.dist.include(ext_modules=[self.e1]) + assert self.dist.ext_modules == [self.e2, self.e1] + + # should not add duplicate + self.dist.include(ext_modules=[self.e1]) + assert self.dist.ext_modules == [self.e2, self.e1] + + def testExcludePackages(self): + self.dist.exclude(packages=['c', 'b', 'a']) + assert self.dist.packages == [] + assert self.dist.py_modules == ['x'] + assert self.dist.ext_modules == [self.e1] + + def testEmpty(self): + dist = makeSetup() + dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) + dist = makeSetup() + dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) + + def testContents(self): + assert self.dist.has_contents_for('a') + self.dist.exclude_package('a') + assert not self.dist.has_contents_for('a') + + assert self.dist.has_contents_for('b') + self.dist.exclude_package('b') + assert not self.dist.has_contents_for('b') + + assert self.dist.has_contents_for('c') + self.dist.exclude_package('c') + assert not self.dist.has_contents_for('c') + + def testInvalidIncludeExclude(self): + with pytest.raises(DistutilsSetupError): + self.dist.include(nonexistent_option='x') + with pytest.raises(DistutilsSetupError): + self.dist.exclude(nonexistent_option='x') + with pytest.raises(DistutilsSetupError): + self.dist.include(packages={'x': 'y'}) + with pytest.raises(DistutilsSetupError): + self.dist.exclude(packages={'x': 'y'}) + with pytest.raises(DistutilsSetupError): + self.dist.include(ext_modules={'x': 'y'}) + with pytest.raises(DistutilsSetupError): + self.dist.exclude(ext_modules={'x': 'y'}) + + with pytest.raises(DistutilsSetupError): + self.dist.include(package_dir=['q']) + with pytest.raises(DistutilsSetupError): + self.dist.exclude(package_dir=['q']) + + +class TestCommandTests: + def testTestIsCommand(self): + test_cmd = makeSetup().get_command_obj('test') + assert (isinstance(test_cmd, distutils.cmd.Command)) + + def testLongOptSuiteWNoDefault(self): + ts1 = makeSetup(script_args=['test', '--test-suite=foo.tests.suite']) + ts1 = ts1.get_command_obj('test') + ts1.ensure_finalized() + assert 
ts1.test_suite == 'foo.tests.suite' + + def testDefaultSuite(self): + ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test') + ts2.ensure_finalized() + assert ts2.test_suite == 'bar.tests.suite' + + def testDefaultWModuleOnCmdLine(self): + ts3 = makeSetup( + test_suite='bar.tests', + script_args=['test', '-m', 'foo.tests'] + ).get_command_obj('test') + ts3.ensure_finalized() + assert ts3.test_module == 'foo.tests' + assert ts3.test_suite == 'foo.tests.test_suite' + + def testConflictingOptions(self): + ts4 = makeSetup( + script_args=['test', '-m', 'bar.tests', '-s', 'foo.tests.suite'] + ).get_command_obj('test') + with pytest.raises(DistutilsOptionError): + ts4.ensure_finalized() + + def testNoSuite(self): + ts5 = makeSetup().get_command_obj('test') + ts5.ensure_finalized() + assert ts5.test_suite is None + + +@pytest.fixture +def example_source(tmpdir): + tmpdir.mkdir('foo') + (tmpdir / 'foo/bar.py').write('') + (tmpdir / 'readme.txt').write('') + return tmpdir + + +def test_findall(example_source): + found = list(setuptools.findall(str(example_source))) + expected = ['readme.txt', 'foo/bar.py'] + expected = [example_source.join(fn) for fn in expected] + assert found == expected + + +def test_findall_curdir(example_source): + with example_source.as_cwd(): + found = list(setuptools.findall()) + expected = ['readme.txt', os.path.join('foo', 'bar.py')] + assert found == expected + + +@pytest.fixture +def can_symlink(tmpdir): + """ + Skip if cannot create a symbolic link + """ + link_fn = 'link' + target_fn = 'target' + try: + os.symlink(target_fn, link_fn) + except (OSError, NotImplementedError, AttributeError): + pytest.skip("Cannot create symbolic links") + os.remove(link_fn) + + +def test_findall_missing_symlink(tmpdir, can_symlink): + with tmpdir.as_cwd(): + os.symlink('foo', 'bar') + found = list(setuptools.findall()) + assert found == [] diff --git a/setuptools/tests/test_sphinx_upload_docs.py b/setuptools/tests/test_sphinx_upload_docs.py new file mode 100644 index 00000000..cc5b8293 --- /dev/null +++ b/setuptools/tests/test_sphinx_upload_docs.py @@ -0,0 +1,38 @@ +import pytest + +from jaraco import path + +from setuptools.command.upload_docs import upload_docs +from setuptools.dist import Distribution + + +@pytest.fixture +def sphinx_doc_sample_project(tmpdir_cwd): + path.build({ + 'setup.py': 'from setuptools import setup; setup()', + 'build': { + 'docs': { + 'conf.py': 'project="test"', + 'index.rst': ".. 
toctree::\ + :maxdepth: 2\ + :caption: Contents:", + }, + }, + }) + + +@pytest.mark.usefixtures('sphinx_doc_sample_project') +class TestSphinxUploadDocs: + def test_sphinx_doc(self): + params = dict( + name='foo', + packages=['test'], + ) + dist = Distribution(params) + + cmd = upload_docs(dist) + + cmd.initialize_options() + assert cmd.upload_dir is None + assert cmd.has_sphinx() is True + cmd.finalize_options() diff --git a/setuptools/tests/test_test.py b/setuptools/tests/test_test.py new file mode 100644 index 00000000..6bce8e20 --- /dev/null +++ b/setuptools/tests/test_test.py @@ -0,0 +1,108 @@ +from distutils import log +import os + +import pytest + +from setuptools.command.test import test +from setuptools.dist import Distribution + +from .textwrap import DALS + + +SETUP_PY = DALS( + """ + from setuptools import setup + + setup(name='foo', + packages=['name', 'name.space', 'name.space.tests'], + namespace_packages=['name'], + test_suite='name.space.tests.test_suite', + ) + """ +) + +NS_INIT = DALS( + """ + # -*- coding: Latin-1 -*- + # Söme Arbiträry Ünicode to test Distribute Issüé 310 + try: + __import__('pkg_resources').declare_namespace(__name__) + except ImportError: + from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) + """ +) + +TEST_PY = DALS( + """ + import unittest + + class TestTest(unittest.TestCase): + def test_test(self): + print "Foo" # Should fail under Python 3 + + test_suite = unittest.makeSuite(TestTest) + """ +) + + +@pytest.fixture +def sample_test(tmpdir_cwd): + os.makedirs('name/space/tests') + + # setup.py + with open('setup.py', 'wt') as f: + f.write(SETUP_PY) + + # name/__init__.py + with open('name/__init__.py', 'wb') as f: + f.write(NS_INIT.encode('Latin-1')) + + # name/space/__init__.py + with open('name/space/__init__.py', 'wt') as f: + f.write('#empty\n') + + # name/space/tests/__init__.py + with open('name/space/tests/__init__.py', 'wt') as f: + f.write(TEST_PY) + + +@pytest.fixture +def quiet_log(): + # Running some of the other tests will automatically + # change the log level to info, messing our output. + log.set_verbosity(0) + + +@pytest.mark.usefixtures('tmpdir_cwd', 'quiet_log') +def test_tests_are_run_once(capfd): + params = dict( + name='foo', + packages=['dummy'], + ) + with open('setup.py', 'wt') as f: + f.write('from setuptools import setup; setup(\n') + for k, v in sorted(params.items()): + f.write(' %s=%r,\n' % (k, v)) + f.write(')\n') + os.makedirs('dummy') + with open('dummy/__init__.py', 'wt'): + pass + with open('dummy/test_dummy.py', 'wt') as f: + f.write( + DALS( + """ + import unittest + class TestTest(unittest.TestCase): + def test_test(self): + print('Foo') + """ + ) + ) + dist = Distribution(params) + dist.script_name = 'setup.py' + cmd = test(dist) + cmd.ensure_finalized() + cmd.run() + out, err = capfd.readouterr() + assert out == 'Foo\n' diff --git a/setuptools/tests/test_unicode_utils.py b/setuptools/tests/test_unicode_utils.py new file mode 100644 index 00000000..a24a9bd5 --- /dev/null +++ b/setuptools/tests/test_unicode_utils.py @@ -0,0 +1,10 @@ +from setuptools import unicode_utils + + +def test_filesys_decode_fs_encoding_is_None(monkeypatch): + """ + Test filesys_decode does not raise TypeError when + getfilesystemencoding returns None. 
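+ (getfilesystemencoding can return None on some interpreters; + filesys_decode is expected to fall back to a default codec rather than + pass None through to bytes.decode.)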
+ """ + monkeypatch.setattr('sys.getfilesystemencoding', lambda: None) + unicode_utils.filesys_decode(b'test') diff --git a/setuptools/tests/test_upload.py b/setuptools/tests/test_upload.py new file mode 100644 index 00000000..7586cb26 --- /dev/null +++ b/setuptools/tests/test_upload.py @@ -0,0 +1,22 @@ +from setuptools.command.upload import upload +from setuptools.dist import Distribution +from setuptools.errors import RemovedCommandError + +try: + from unittest import mock +except ImportError: + import mock + +import pytest + + +class TestUpload: + def test_upload_exception(self): + """Ensure that the register command has been properly removed.""" + dist = Distribution() + dist.dist_files = [(mock.Mock(), mock.Mock(), mock.Mock())] + + cmd = upload(dist) + + with pytest.raises(RemovedCommandError): + cmd.run() diff --git a/setuptools/tests/test_upload_docs.py b/setuptools/tests/test_upload_docs.py new file mode 100644 index 00000000..55978aad --- /dev/null +++ b/setuptools/tests/test_upload_docs.py @@ -0,0 +1,64 @@ +import os +import zipfile +import contextlib + +import pytest +from jaraco import path + +from setuptools.command.upload_docs import upload_docs +from setuptools.dist import Distribution + +from .textwrap import DALS +from . import contexts + + +@pytest.fixture +def sample_project(tmpdir_cwd): + path.build({ + 'setup.py': DALS(""" + from setuptools import setup + + setup(name='foo') + """), + 'build': { + 'index.html': 'Hello world.', + 'empty': {}, + } + }) + + +@pytest.mark.usefixtures('sample_project') +@pytest.mark.usefixtures('user_override') +class TestUploadDocsTest: + def test_create_zipfile(self): + """ + Ensure zipfile creation handles common cases, including a folder + containing an empty folder. + """ + + dist = Distribution() + + cmd = upload_docs(dist) + cmd.target_dir = cmd.upload_dir = 'build' + with contexts.tempdir() as tmp_dir: + tmp_file = os.path.join(tmp_dir, 'foo.zip') + zip_file = cmd.create_zipfile(tmp_file) + + assert zipfile.is_zipfile(tmp_file) + + with contextlib.closing(zipfile.ZipFile(tmp_file)) as zip_file: + assert zip_file.namelist() == ['index.html'] + + def test_build_multipart(self): + data = dict( + a="foo", + b="bar", + file=('file.txt', b'content'), + ) + body, content_type = upload_docs._build_multipart(data) + assert 'form-data' in content_type + assert "b'" not in content_type + assert 'b"' not in content_type + assert isinstance(body, bytes) + assert b'foo' in body + assert b'content' in body diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py new file mode 100644 index 00000000..00f5f185 --- /dev/null +++ b/setuptools/tests/test_virtualenv.py @@ -0,0 +1,209 @@ +import glob +import os +import sys +import itertools + +import pathlib + +import pytest +from pytest_fixture_config import yield_requires_config + +import pytest_virtualenv + +from .textwrap import DALS +from .test_easy_install import make_nspkg_sdist + + +@pytest.fixture(autouse=True) +def pytest_virtualenv_works(virtualenv): + """ + pytest_virtualenv may not work. if it doesn't, skip these + tests. See #1284. + """ + venv_prefix = virtualenv.run( + 'python -c "import sys; print(sys.prefix)"', + capture=True, + ).strip() + if venv_prefix == sys.prefix: + pytest.skip("virtualenv is broken (see pypa/setuptools#1284)") + + +@yield_requires_config(pytest_virtualenv.CONFIG, ['virtualenv_executable']) +@pytest.fixture(scope='function') +def bare_virtualenv(): + """ Bare virtualenv (no pip/setuptools/wheel). 
+ """ + with pytest_virtualenv.VirtualEnv(args=( + '--no-wheel', + '--no-pip', + '--no-setuptools', + )) as venv: + yield venv + + +def test_clean_env_install(bare_virtualenv, tmp_src): + """ + Check setuptools can be installed in a clean environment. + """ + cmd = [bare_virtualenv.python, 'setup.py', 'install'] + bare_virtualenv.run(cmd, cd=tmp_src) + + +def _get_pip_versions(): + # This fixture will attempt to detect if tests are being run without + # network connectivity and if so skip some tests + + network = True + if not os.environ.get('NETWORK_REQUIRED', False): # pragma: nocover + try: + from urllib.request import urlopen + from urllib.error import URLError + except ImportError: + from urllib2 import urlopen, URLError # Python 2.7 compat + + try: + urlopen('https://pypi.org', timeout=1) + except URLError: + # No network, disable most of these tests + network = False + + def mark(param, *marks): + if not isinstance(param, type(pytest.param(''))): + param = pytest.param(param) + return param._replace(marks=param.marks + marks) + + def skip_network(param): + return param if network else mark(param, pytest.mark.skip(reason="no network")) + + network_versions = [ + mark('pip<20', pytest.mark.xfail(reason='pypa/pip#6599')), + 'pip<20.1', + 'pip<21', + 'pip<22', + mark( + 'https://github.com/pypa/pip/archive/main.zip', + pytest.mark.skipif('sys.version_info < (3, 7)'), + ), + ] + + versions = itertools.chain( + [None], + map(skip_network, network_versions) + ) + + return list(versions) + + +@pytest.mark.skipif( + 'platform.python_implementation() == "PyPy"', + reason="https://github.com/pypa/setuptools/pull/2865#issuecomment-965834995", +) +@pytest.mark.parametrize('pip_version', _get_pip_versions()) +def test_pip_upgrade_from_source(pip_version, tmp_src, virtualenv): + """ + Check pip can upgrade setuptools from source. + """ + # Install pip/wheel, and remove setuptools (as it + # should not be needed for bootstraping from source) + if pip_version is None: + upgrade_pip = () + else: + upgrade_pip = ('python -m pip install -U "{pip_version}" --retries=1',) + virtualenv.run(' && '.join(( + 'pip uninstall -y setuptools', + 'pip install -U wheel', + ) + upgrade_pip).format(pip_version=pip_version)) + dist_dir = virtualenv.workspace + # Generate source distribution / wheel. + virtualenv.run(' && '.join(( + 'python setup.py -q sdist -d {dist}', + 'python setup.py -q bdist_wheel -d {dist}', + )).format(dist=dist_dir), cd=tmp_src) + sdist = glob.glob(os.path.join(dist_dir, '*.zip'))[0] + wheel = glob.glob(os.path.join(dist_dir, '*.whl'))[0] + # Then update from wheel. + virtualenv.run('pip install ' + wheel) + # And finally try to upgrade from source. + virtualenv.run('pip install --no-cache-dir --upgrade ' + sdist) + + +def _check_test_command_install_requirements(virtualenv, tmpdir, cwd): + """ + Check the test command will install all required dependencies. + """ + # Install setuptools. 
+ virtualenv.run('python setup.py develop', cd=cwd) + + def sdist(distname, version): + dist_path = tmpdir.join('%s-%s.tar.gz' % (distname, version)) + make_nspkg_sdist(str(dist_path), distname, version) + return dist_path + dependency_links = [ + pathlib.Path(str(dist_path)).as_uri() + for dist_path in ( + sdist('foobar', '2.4'), + sdist('bits', '4.2'), + sdist('bobs', '6.0'), + sdist('pieces', '0.6'), + ) + ] + with tmpdir.join('setup.py').open('w') as fp: + fp.write(DALS( + ''' + from setuptools import setup + + setup( + dependency_links={dependency_links!r}, + install_requires=[ + 'barbazquux1; sys_platform in ""', + 'foobar==2.4', + ], + setup_requires='bits==4.2', + tests_require=""" + bobs==6.0 + """, + extras_require={{ + 'test': ['barbazquux2'], + ':"" in sys_platform': 'pieces==0.6', + ':python_version > "1"': """ + pieces + foobar + """, + }} + ) + '''.format(dependency_links=dependency_links))) + with tmpdir.join('test.py').open('w') as fp: + fp.write(DALS( + ''' + import foobar + import bits + import bobs + import pieces + + open('success', 'w').close() + ''')) + # Run test command for test package. + # use 'virtualenv.python' as workaround for man-group/pytest-plugins#166 + cmd = [virtualenv.python, 'setup.py', 'test', '-s', 'test'] + virtualenv.run(cmd, cd=str(tmpdir)) + assert tmpdir.join('success').check() + + +def test_test_command_install_requirements(virtualenv, tmpdir, request): + # Ensure pip/wheel packages are installed. + virtualenv.run( + "python -c \"__import__('pkg_resources').require(['pip', 'wheel'])\"") + # uninstall setuptools so that 'setup.py develop' works + virtualenv.run("python -m pip uninstall -y setuptools") + # disable index URL so bits and bobs aren't requested from PyPI + virtualenv.env['PIP_NO_INDEX'] = '1' + _check_test_command_install_requirements(virtualenv, tmpdir, request.config.rootdir) + + +def test_no_missing_dependencies(bare_virtualenv, request): + """ + Quick and dirty test to ensure all external dependencies are vendored. 
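+ Running 'setup.py <command> -h' in a bare virtualenv would fail if any + external dependency had to be imported from outside the vendored copies, + so a passing run indicates vendoring is complete.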
+ """ + for command in ('upload',): # sorted(distutils.command.__all__): + cmd = [bare_virtualenv.python, 'setup.py', command, '-h'] + bare_virtualenv.run(cmd, cd=request.config.rootdir) diff --git a/setuptools/tests/test_wheel.py b/setuptools/tests/test_wheel.py new file mode 100644 index 00000000..7345b135 --- /dev/null +++ b/setuptools/tests/test_wheel.py @@ -0,0 +1,583 @@ +# -*- coding: utf-8 -*- + +"""wheel tests +""" + +from distutils.sysconfig import get_config_var +from distutils.util import get_platform +import contextlib +import glob +import inspect +import os +import shutil +import subprocess +import sys +import zipfile + +import pytest +from jaraco import path + +from pkg_resources import Distribution, PathMetadata, PY_MAJOR +from setuptools.extern.packaging.utils import canonicalize_name +from setuptools.extern.packaging.tags import parse_tag +from setuptools.wheel import Wheel + +from .contexts import tempdir +from .textwrap import DALS + + +WHEEL_INFO_TESTS = ( + ('invalid.whl', ValueError), + ('simplewheel-2.0-1-py2.py3-none-any.whl', { + 'project_name': 'simplewheel', + 'version': '2.0', + 'build': '1', + 'py_version': 'py2.py3', + 'abi': 'none', + 'platform': 'any', + }), + ('simple.dist-0.1-py2.py3-none-any.whl', { + 'project_name': 'simple.dist', + 'version': '0.1', + 'build': None, + 'py_version': 'py2.py3', + 'abi': 'none', + 'platform': 'any', + }), + ('example_pkg_a-1-py3-none-any.whl', { + 'project_name': 'example_pkg_a', + 'version': '1', + 'build': None, + 'py_version': 'py3', + 'abi': 'none', + 'platform': 'any', + }), + ('PyQt5-5.9-5.9.1-cp35.cp36.cp37-abi3-manylinux1_x86_64.whl', { + 'project_name': 'PyQt5', + 'version': '5.9', + 'build': '5.9.1', + 'py_version': 'cp35.cp36.cp37', + 'abi': 'abi3', + 'platform': 'manylinux1_x86_64', + }), +) + + +@pytest.mark.parametrize( + ('filename', 'info'), WHEEL_INFO_TESTS, + ids=[t[0] for t in WHEEL_INFO_TESTS] +) +def test_wheel_info(filename, info): + if inspect.isclass(info): + with pytest.raises(info): + Wheel(filename) + return + w = Wheel(filename) + assert {k: getattr(w, k) for k in info.keys()} == info + + +@contextlib.contextmanager +def build_wheel(extra_file_defs=None, **kwargs): + file_defs = { + 'setup.py': (DALS( + ''' + # -*- coding: utf-8 -*- + from setuptools import setup + import setuptools + setup(**%r) + ''' + ) % kwargs).encode('utf-8'), + } + if extra_file_defs: + file_defs.update(extra_file_defs) + with tempdir() as source_dir: + path.build(file_defs, source_dir) + subprocess.check_call((sys.executable, 'setup.py', + '-q', 'bdist_wheel'), cwd=source_dir) + yield glob.glob(os.path.join(source_dir, 'dist', '*.whl'))[0] + + +def tree_set(root): + contents = set() + for dirpath, dirnames, filenames in os.walk(root): + for filename in filenames: + contents.add(os.path.join(os.path.relpath(dirpath, root), + filename)) + return contents + + +def flatten_tree(tree): + """Flatten nested dicts and lists into a full list of paths""" + output = set() + for node, contents in tree.items(): + if isinstance(contents, dict): + contents = flatten_tree(contents) + + for elem in contents: + if isinstance(elem, dict): + output |= {os.path.join(node, val) + for val in flatten_tree(elem)} + else: + output.add(os.path.join(node, elem)) + return output + + +def format_install_tree(tree): + return { + x.format( + py_version=PY_MAJOR, + platform=get_platform(), + shlib_ext=get_config_var('EXT_SUFFIX') or get_config_var('SO')) + for x in tree} + + +def _check_wheel_install(filename, install_dir, install_tree_includes, + 
project_name, version, requires_txt): + w = Wheel(filename) + egg_path = os.path.join(install_dir, w.egg_name()) + w.install_as_egg(egg_path) + if install_tree_includes is not None: + install_tree = format_install_tree(install_tree_includes) + exp = tree_set(install_dir) + assert install_tree.issubset(exp), (install_tree - exp) + + metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO')) + dist = Distribution.from_filename(egg_path, metadata=metadata) + assert dist.project_name == project_name + assert dist.version == version + if requires_txt is None: + assert not dist.has_metadata('requires.txt') + else: + assert requires_txt == dist.get_metadata('requires.txt').lstrip() + + +class Record: + + def __init__(self, id, **kwargs): + self._id = id + self._fields = kwargs + + def __repr__(self): + return '%s(**%r)' % (self._id, self._fields) + + +WHEEL_INSTALL_TESTS = ( + + dict( + id='basic', + file_defs={ + 'foo': { + '__init__.py': '' + } + }, + setup_kwargs=dict( + packages=['foo'], + ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt' + ], + 'foo': ['__init__.py'] + } + }), + ), + + dict( + id='utf-8', + setup_kwargs=dict( + description='Description accentuée', + ) + ), + + dict( + id='data', + file_defs={ + 'data.txt': DALS( + ''' + Some data... + ''' + ), + }, + setup_kwargs=dict( + data_files=[('data_dir', ['data.txt'])], + ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt' + ], + 'data_dir': [ + 'data.txt' + ] + } + }), + ), + + dict( + id='extension', + file_defs={ + 'extension.c': DALS( + ''' + #include "Python.h" + + #if PY_MAJOR_VERSION >= 3 + + static struct PyModuleDef moduledef = { + PyModuleDef_HEAD_INIT, + "extension", + NULL, + 0, + NULL, + NULL, + NULL, + NULL, + NULL + }; + + #define INITERROR return NULL + + PyMODINIT_FUNC PyInit_extension(void) + + #else + + #define INITERROR return + + void initextension(void) + + #endif + { + #if PY_MAJOR_VERSION >= 3 + PyObject *module = PyModule_Create(&moduledef); + #else + PyObject *module = Py_InitModule("extension", NULL); + #endif + if (module == NULL) + INITERROR; + #if PY_MAJOR_VERSION >= 3 + return module; + #endif + } + ''' + ), + }, + setup_kwargs=dict( + ext_modules=[ + Record('setuptools.Extension', + name='extension', + sources=['extension.c']) + ], + ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}-{platform}.egg': [ + 'extension{shlib_ext}', + {'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt', + ]}, + ] + }), + ), + + dict( + id='header', + file_defs={ + 'header.h': DALS( + ''' + ''' + ), + }, + setup_kwargs=dict( + headers=['header.h'], + ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': [ + 'header.h', + {'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt', + ]}, + ] + }), + ), + + dict( + id='script', + file_defs={ + 'script.py': DALS( + ''' + #/usr/bin/python + print('hello world!') + ''' + ), + 'script.sh': DALS( + ''' + #/bin/sh + echo 'hello world!' 
+ ''' + ), + }, + setup_kwargs=dict( + scripts=['script.py', 'script.sh'], + ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt', + {'scripts': [ + 'script.py', + 'script.sh' + ]} + + ] + } + }) + ), + + dict( + id='requires1', + install_requires='foobar==2.0', + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'requires.txt', + 'top_level.txt', + ] + } + }), + requires_txt=DALS( + ''' + foobar==2.0 + ''' + ), + ), + + dict( + id='requires2', + install_requires=''' + bar + foo<=2.0; %r in sys_platform + ''' % sys.platform, + requires_txt=DALS( + ''' + bar + foo<=2.0 + ''' + ), + ), + + dict( + id='requires3', + install_requires=''' + bar; %r != sys_platform + ''' % sys.platform, + ), + + dict( + id='requires4', + install_requires=''' + foo + ''', + extras_require={ + 'extra': 'foobar>3', + }, + requires_txt=DALS( + ''' + foo + + [extra] + foobar>3 + ''' + ), + ), + + dict( + id='requires5', + extras_require={ + 'extra': 'foobar; %r != sys_platform' % sys.platform, + }, + requires_txt=DALS( + ''' + [extra] + ''' + ), + ), + + dict( + id='namespace_package', + file_defs={ + 'foo': { + 'bar': { + '__init__.py': '' + }, + }, + }, + setup_kwargs=dict( + namespace_packages=['foo'], + packages=['foo.bar'], + ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': [ + 'foo-1.0-py{py_version}-nspkg.pth', + {'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'namespace_packages.txt', + 'top_level.txt', + ]}, + {'foo': [ + '__init__.py', + {'bar': ['__init__.py']}, + ]}, + ] + }), + ), + + dict( + id='empty_namespace_package', + file_defs={ + 'foobar': { + '__init__.py': + "__import__('pkg_resources').declare_namespace(__name__)", + }, + }, + setup_kwargs=dict( + namespace_packages=['foobar'], + packages=['foobar'], + ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': [ + 'foo-1.0-py{py_version}-nspkg.pth', + {'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'namespace_packages.txt', + 'top_level.txt', + ]}, + {'foobar': [ + '__init__.py', + ]}, + ] + }), + ), + + dict( + id='data_in_package', + file_defs={ + 'foo': { + '__init__.py': '', + 'data_dir': { + 'data.txt': DALS( + ''' + Some data... 
+ ''' + ), + } + } + }, + setup_kwargs=dict( + packages=['foo'], + data_files=[('foo/data_dir', ['foo/data_dir/data.txt'])], + ), + install_tree=flatten_tree({ + 'foo-1.0-py{py_version}.egg': { + 'EGG-INFO': [ + 'PKG-INFO', + 'RECORD', + 'WHEEL', + 'top_level.txt', + ], + 'foo': [ + '__init__.py', + {'data_dir': [ + 'data.txt', + ]} + ] + } + }), + ), + +) + + +@pytest.mark.parametrize( + 'params', WHEEL_INSTALL_TESTS, + ids=list(params['id'] for params in WHEEL_INSTALL_TESTS), +) +def test_wheel_install(params): + project_name = params.get('name', 'foo') + version = params.get('version', '1.0') + install_requires = params.get('install_requires', []) + extras_require = params.get('extras_require', {}) + requires_txt = params.get('requires_txt', None) + install_tree = params.get('install_tree') + file_defs = params.get('file_defs', {}) + setup_kwargs = params.get('setup_kwargs', {}) + with build_wheel( + name=project_name, + version=version, + install_requires=install_requires, + extras_require=extras_require, + extra_file_defs=file_defs, + **setup_kwargs + ) as filename, tempdir() as install_dir: + _check_wheel_install(filename, install_dir, + install_tree, project_name, + version, requires_txt) + + +def test_wheel_install_pep_503(): + project_name = 'Foo_Bar' # PEP 503 canonicalized name is "foo-bar" + version = '1.0' + with build_wheel( + name=project_name, + version=version, + ) as filename, tempdir() as install_dir: + new_filename = filename.replace(project_name, + canonicalize_name(project_name)) + shutil.move(filename, new_filename) + _check_wheel_install(new_filename, install_dir, None, + canonicalize_name(project_name), + version, None) + + +def test_wheel_no_dist_dir(): + project_name = 'nodistinfo' + version = '1.0' + wheel_name = '{0}-{1}-py2.py3-none-any.whl'.format(project_name, version) + with tempdir() as source_dir: + wheel_path = os.path.join(source_dir, wheel_name) + # create an empty zip file + zipfile.ZipFile(wheel_path, 'w').close() + with tempdir() as install_dir: + with pytest.raises(ValueError): + _check_wheel_install(wheel_path, install_dir, None, + project_name, + version, None) + + +def test_wheel_is_compatible(monkeypatch): + def sys_tags(): + for t in parse_tag('cp36-cp36m-manylinux1_x86_64'): + yield t + monkeypatch.setattr('setuptools.wheel.sys_tags', sys_tags) + assert Wheel( + 'onnxruntime-0.1.2-cp36-cp36m-manylinux1_x86_64.whl').is_compatible() diff --git a/setuptools/tests/test_windows_wrappers.py b/setuptools/tests/test_windows_wrappers.py new file mode 100644 index 00000000..8ac9bd07 --- /dev/null +++ b/setuptools/tests/test_windows_wrappers.py @@ -0,0 +1,197 @@ +""" +Python Script Wrapper for Windows +================================= + +setuptools includes wrappers for Python scripts that allow them to be +executed like regular Windows programs. There are two wrappers, one +for command-line programs, cli.exe, and one for graphical programs, +gui.exe. These programs are almost identical, function in much the +same way, and are generated from the same source file. The wrapper +programs are used by copying them to the directory containing the +script they are to wrap and giving them the same name as the script +they wrap.
+""" + +import sys +import platform +import textwrap +import subprocess + +import pytest + +from setuptools.command.easy_install import nt_quote_arg +import pkg_resources + +pytestmark = pytest.mark.skipif(sys.platform != 'win32', reason="Windows only") + + +class WrapperTester: + @classmethod + def prep_script(cls, template): + python_exe = nt_quote_arg(sys.executable) + return template % locals() + + @classmethod + def create_script(cls, tmpdir): + """ + Create a simple script, foo-script.py + + Note that the script starts with a Unix-style '#!' line saying which + Python executable to run. The wrapper will use this line to find the + correct Python executable. + """ + + script = cls.prep_script(cls.script_tmpl) + + with (tmpdir / cls.script_name).open('w') as f: + f.write(script) + + # also copy cli.exe to the sample directory + with (tmpdir / cls.wrapper_name).open('wb') as f: + w = pkg_resources.resource_string('setuptools', cls.wrapper_source) + f.write(w) + + +def win_launcher_exe(prefix): + """ A simple routine to select launcher script based on platform.""" + assert prefix in ('cli', 'gui') + if platform.machine() == "ARM64": + return "{}-arm64.exe".format(prefix) + else: + return "{}-32.exe".format(prefix) + + +class TestCLI(WrapperTester): + script_name = 'foo-script.py' + wrapper_name = 'foo.exe' + wrapper_source = win_launcher_exe('cli') + + script_tmpl = textwrap.dedent(""" + #!%(python_exe)s + import sys + input = repr(sys.stdin.read()) + print(sys.argv[0][-14:]) + print(sys.argv[1:]) + print(input) + if __debug__: + print('non-optimized') + """).lstrip() + + def test_basic(self, tmpdir): + """ + When the copy of cli.exe, foo.exe in this example, runs, it examines + the path name it was run with and computes a Python script path name + by removing the '.exe' suffix and adding the '-script.py' suffix. (For + GUI programs, the suffix '-script.pyw' is added.) This is why we + named out script the way we did. Now we can run out script by running + the wrapper: + + This example was a little pathological in that it exercised windows + (MS C runtime) quoting rules: + + - Strings containing spaces are surrounded by double quotes. + + - Double quotes in strings need to be escaped by preceding them with + back slashes. + + - One or more backslashes preceding double quotes need to be escaped + by preceding each of them with back slashes. + """ + self.create_script(tmpdir) + cmd = [ + str(tmpdir / 'foo.exe'), + 'arg1', + 'arg 2', + 'arg "2\\"', + 'arg 4\\', + 'arg5 a\\\\b', + ] + proc = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE) + stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii')) + actual = stdout.decode('ascii').replace('\r\n', '\n') + expected = textwrap.dedent(r""" + \foo-script.py + ['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b'] + 'hello\nworld\n' + non-optimized + """).lstrip() + assert actual == expected + + def test_with_options(self, tmpdir): + """ + Specifying Python Command-line Options + -------------------------------------- + + You can specify a single argument on the '#!' line. This can be used + to specify Python options like -O, to run in optimized mode or -i + to start the interactive interpreter. You can combine multiple + options as usual. 
For example, to run in optimized mode and + enter the interpreter after running the script, you could use -Oi: + """ + self.create_script(tmpdir) + tmpl = textwrap.dedent(""" + #!%(python_exe)s -Oi + import sys + input = repr(sys.stdin.read()) + print(sys.argv[0][-14:]) + print(sys.argv[1:]) + print(input) + if __debug__: + print('non-optimized') + sys.ps1 = '---' + """).lstrip() + with (tmpdir / 'foo-script.py').open('w') as f: + f.write(self.prep_script(tmpl)) + cmd = [str(tmpdir / 'foo.exe')] + proc = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stdin=subprocess.PIPE, + stderr=subprocess.STDOUT) + stdout, stderr = proc.communicate() + actual = stdout.decode('ascii').replace('\r\n', '\n') + expected = textwrap.dedent(r""" + \foo-script.py + [] + '' + --- + """).lstrip() + assert actual == expected + + +class TestGUI(WrapperTester): + """ + Testing the GUI Version + ----------------------- + """ + script_name = 'bar-script.pyw' + wrapper_source = win_launcher_exe('gui') + wrapper_name = 'bar.exe' + + script_tmpl = textwrap.dedent(""" + #!%(python_exe)s + import sys + f = open(sys.argv[1], 'wb') + bytes_written = f.write(repr(sys.argv[2]).encode('utf-8')) + f.close() + """).strip() + + def test_basic(self, tmpdir): + """Test the GUI version with the simple script, bar-script.pyw""" + self.create_script(tmpdir) + + cmd = [ + str(tmpdir / 'bar.exe'), + str(tmpdir / 'test_output.txt'), + 'Test Argument', + ] + proc = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, + stderr=subprocess.STDOUT) + stdout, stderr = proc.communicate() + assert not stdout + assert not stderr + with (tmpdir / 'test_output.txt').open('rb') as f_out: + actual = f_out.read().decode('ascii') + assert actual == repr('Test Argument') diff --git a/setuptools/tests/text.py b/setuptools/tests/text.py new file mode 100644 index 00000000..e05cc633 --- /dev/null +++ b/setuptools/tests/text.py @@ -0,0 +1,4 @@ +class Filenames: + unicode = 'smörbröd.py' + latin_1 = unicode.encode('latin-1') + utf_8 = unicode.encode('utf-8') diff --git a/setuptools/tests/textwrap.py b/setuptools/tests/textwrap.py new file mode 100644 index 00000000..5e39618d --- /dev/null +++ b/setuptools/tests/textwrap.py @@ -0,0 +1,6 @@ +import textwrap + + +def DALS(s): + "dedent and left-strip" + return textwrap.dedent(s).lstrip()
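The two small helpers added at the end of this changeset, Filenames in text.py and DALS in textwrap.py, are the building blocks the other test modules use to create fixture files inline (for example, build_wheel and _check_test_command_install_requirements both write setup.py contents through DALS). The sketch below shows how they behave; the DALS definition is repeated so the snippet is self-contained, and the 'example' project name is purely illustrative.

import textwrap


def DALS(s):
    "dedent and left-strip"
    return textwrap.dedent(s).lstrip()


# DALS turns an indented triple-quoted literal into file contents that
# start at column zero, so test code can keep fixtures nicely indented
# while writing them out verbatim.
setup_py = DALS(
    """
    from setuptools import setup

    setup(name='example', version='0.1')
    """
)
assert setup_py.startswith('from setuptools import setup')

# Filenames (text.py) pairs one non-ASCII filename with its latin-1 and
# utf-8 byte forms; the two encodings differ, which is the property that
# encoding-sensitive tests presumably rely on.
name = 'smörbröd.py'
assert name.encode('utf-8') != name.encode('latin-1')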