author      Jason R. Coombs <jaraco@jaraco.com>    2016-09-27 14:24:22 -0500
committer   Jason R. Coombs <jaraco@jaraco.com>    2016-09-27 14:24:22 -0500
commit      66a6724da8eda3336643dee086da2a3495e6422a (patch)
tree        64043e9782491bde3a3a9ae2314cc59451a6c9c0 /setuptools
parent      df3905616933c90af95e99f705b800a2f5c1c921 (diff)
parent      35ea365b50bd1a64375fdbcce187affab22af3b7 (diff)
download    python-setuptools-git-setuptools-scm.tar.gz
Merge with master (setuptools-scm)
Diffstat (limited to 'setuptools')
54 files changed, 2942 insertions, 688 deletions
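The first file in the diff, setuptools/__init__.py, rewrites PackageFinder so that the include/exclude filters are applied during the directory walk instead of after building the full package list. A minimal sketch of the filter mechanics follows; the helper name `build_filter` and the sample package names are illustrative only, not part of the patch:

```python
# Sketch (assumed names, not from the patch): how the fnmatch-based
# include/exclude filters used by find_packages() behave.
from fnmatch import fnmatchcase

def build_filter(*patterns):
    # True if the candidate package name matches at least one pattern
    return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)

include = build_filter('*')  # default: include everything
exclude = build_filter('ez_setup', '*__pycache__', 'tests', 'tests.*')

candidates = ['pkg', 'pkg.sub', 'tests', 'tests.unit', 'ez_setup']
selected = [name for name in candidates if include(name) and not exclude(name)]
print(selected)  # ['pkg', 'pkg.sub']
```

In the patched `_find_packages_iter`, a directory is yielded only when it contains no dot, holds an `__init__.py`, passes the include filter, and misses the exclude filter, so excluded subtrees are pruned during the walk itself.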
diff --git a/setuptools/__init__.py b/setuptools/__init__.py index b8cec4c3..42f6a5d6 100644 --- a/setuptools/__init__.py +++ b/setuptools/__init__.py @@ -4,20 +4,20 @@ import os import functools import distutils.core import distutils.filelist -from distutils.core import Command as _Command from distutils.util import convert_path from fnmatch import fnmatchcase -from setuptools.extern.six.moves import filterfalse, map +from setuptools.extern.six.moves import filter, filterfalse, map import pkg_resources from setuptools.extension import Extension -from setuptools.dist import Distribution, Feature, _get_unpatched +from setuptools.dist import Distribution, Feature from setuptools.depends import Require +from . import monkey __all__ = [ 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', - 'find_packages' + 'find_packages', ] __version__ = pkg_resources.require('setuptools')[0].version @@ -32,12 +32,18 @@ lib2to3_fixer_packages = ['lib2to3.fixes'] class PackageFinder(object): + """ + Generate a list of all Python packages found within a directory + """ + @classmethod def find(cls, where='.', exclude=(), include=('*',)): """Return a list all Python packages found within directory 'where' - 'where' should be supplied as a "cross-platform" (i.e. URL-style) - path; it will be converted to the appropriate local path syntax. + 'where' is the root directory which will be searched for packages. It + should be supplied as a "cross-platform" (i.e. URL-style) path; it will + be converted to the appropriate local path syntax. + 'exclude' is a sequence of package names to exclude; '*' can be used as a wildcard in the names, such that 'foo.*' will exclude all subpackages of 'foo' (but not 'foo' itself). @@ -46,78 +52,64 @@ class PackageFinder(object): specified, only the named packages will be included. If it's not specified, all found packages will be included. 'include' can contain shell style wildcard patterns just like 'exclude'. - - The list of included packages is built up first and then any - explicitly excluded packages are removed from it. """ - out = cls._find_packages_iter(convert_path(where)) - out = cls.require_parents(out) - includes = cls._build_filter(*include) - excludes = cls._build_filter('ez_setup', '*__pycache__', *exclude) - out = filter(includes, out) - out = filterfalse(excludes, out) - return list(out) - @staticmethod - def require_parents(packages): - """ - Exclude any apparent package that apparently doesn't include its - parent. - - For example, exclude 'foo.bar' if 'foo' is not present. - """ - found = [] - for pkg in packages: - base, sep, child = pkg.rpartition('.') - if base and base not in found: - continue - found.append(pkg) - yield pkg + return list(cls._find_packages_iter( + convert_path(where), + cls._build_filter('ez_setup', '*__pycache__', *exclude), + cls._build_filter(*include))) - @staticmethod - def _candidate_dirs(base_path): + @classmethod + def _find_packages_iter(cls, where, exclude, include): """ - Return all dirs in base_path that might be packages. + All the packages found in 'where' that pass the 'include' filter, but + not the 'exclude' filter. """ - has_dot = lambda name: '.' in name - for root, dirs, files in os.walk(base_path, followlinks=True): - # Exclude directories that contain a period, as they cannot be - # packages. Mutate the list to avoid traversal. 
- dirs[:] = filterfalse(has_dot, dirs) - for dir in dirs: - yield os.path.relpath(os.path.join(root, dir), base_path) - - @classmethod - def _find_packages_iter(cls, base_path): - candidates = cls._candidate_dirs(base_path) - return ( - path.replace(os.path.sep, '.') - for path in candidates - if cls._looks_like_package(os.path.join(base_path, path)) - ) + for root, dirs, files in os.walk(where, followlinks=True): + # Copy dirs to iterate over it, then empty dirs. + all_dirs = dirs[:] + dirs[:] = [] + + for dir in all_dirs: + full_path = os.path.join(root, dir) + rel_path = os.path.relpath(full_path, where) + package = rel_path.replace(os.path.sep, '.') + + # Check if the directory is a package and passes the filters + if ('.' not in dir + and include(package) + and not exclude(package) + and cls._looks_like_package(full_path)): + yield package + dirs.append(dir) @staticmethod def _looks_like_package(path): + """Does a directory look like a package?""" return os.path.isfile(os.path.join(path, '__init__.py')) @staticmethod def _build_filter(*patterns): """ Given a list of patterns, return a callable that will be true only if - the input matches one of the patterns. + the input matches at least one of the patterns. """ return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) + class PEP420PackageFinder(PackageFinder): + @staticmethod def _looks_like_package(path): return True + find_packages = PackageFinder.find setup = distutils.core.setup -_Command = _get_unpatched(_Command) +_Command = monkey.get_unpatched(distutils.core.Command) + class Command(_Command): __doc__ = _Command.__doc__ @@ -137,9 +129,6 @@ class Command(_Command): vars(cmd).update(kw) return cmd -# we can't patch distutils.cmd, alas -distutils.core.Command = Command - def _find_all_simple(path): """ @@ -165,5 +154,4 @@ def findall(dir=os.curdir): return list(files) -# fix findall bug in distutils (http://bugs.python.org/issue12885) -distutils.filelist.findall = findall +monkey.patch_all() diff --git a/setuptools/archive_util.py b/setuptools/archive_util.py index b3c9fa56..b6411cc5 100755 --- a/setuptools/archive_util.py +++ b/setuptools/archive_util.py @@ -1,24 +1,27 @@ """Utilities for extracting common archive formats""" - -__all__ = [ - "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", - "UnrecognizedFormat", "extraction_drivers", "unpack_directory", -] - import zipfile import tarfile import os import shutil import posixpath import contextlib -from pkg_resources import ensure_directory, ContextualZipFile from distutils.errors import DistutilsError +from pkg_resources import ensure_directory, ContextualZipFile + + +__all__ = [ + "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", + "UnrecognizedFormat", "extraction_drivers", "unpack_directory", +] + + class UnrecognizedFormat(DistutilsError): """Couldn't recognize the archive type""" -def default_filter(src,dst): + +def default_filter(src, dst): """The default progress/filter callback; returns True for all files""" return dst @@ -167,4 +170,5 @@ def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): pass return True + extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile diff --git a/setuptools/cli-arm-32.exe b/setuptools/cli-arm-32.exe Binary files differdeleted file mode 100644 index 2f40402d..00000000 --- a/setuptools/cli-arm-32.exe +++ /dev/null diff --git a/setuptools/command/bdist_egg.py b/setuptools/command/bdist_egg.py index 9cebd7fa..cbea7537 100644 --- 
a/setuptools/command/bdist_egg.py +++ b/setuptools/command/bdist_egg.py @@ -8,8 +8,8 @@ from distutils import log from types import CodeType import sys import os -import marshal import textwrap +import marshal from setuptools.extern import six @@ -129,7 +129,7 @@ class bdist_egg(Command): self.distribution.data_files.append(item) try: - log.info("installing package data to %s" % self.bdist_dir) + log.info("installing package data to %s", self.bdist_dir) self.call_command('install_data', force=0, root=None) finally: self.distribution.data_files = old @@ -152,7 +152,7 @@ class bdist_egg(Command): self.run_command("egg_info") # We run install_lib before install_data, because some data hacks # pull their data path from the install_lib command. - log.info("installing library code to %s" % self.bdist_dir) + log.info("installing library code to %s", self.bdist_dir) instcmd = self.get_finalized_command('install') old_root = instcmd.root instcmd.root = None @@ -169,7 +169,7 @@ class bdist_egg(Command): pyfile = os.path.join(self.bdist_dir, strip_module(filename) + '.py') self.stubs.append(pyfile) - log.info("creating stub loader for %s" % ext_name) + log.info("creating stub loader for %s", ext_name) if not self.dry_run: write_stub(os.path.basename(ext_name), pyfile) to_compile.append(pyfile) @@ -186,14 +186,14 @@ class bdist_egg(Command): self.mkpath(egg_info) if self.distribution.scripts: script_dir = os.path.join(egg_info, 'scripts') - log.info("installing scripts to %s" % script_dir) + log.info("installing scripts to %s", script_dir) self.call_command('install_scripts', install_dir=script_dir, no_ep=1) self.copy_metadata_to(egg_info) native_libs = os.path.join(egg_info, "native_libs.txt") if all_outputs: - log.info("writing %s" % native_libs) + log.info("writing %s", native_libs) if not self.dry_run: ensure_directory(native_libs) libs_file = open(native_libs, 'wt') @@ -201,7 +201,7 @@ class bdist_egg(Command): libs_file.write('\n') libs_file.close() elif os.path.isfile(native_libs): - log.info("removing %s" % native_libs) + log.info("removing %s", native_libs) if not self.dry_run: os.unlink(native_libs) @@ -432,6 +432,7 @@ def can_scan(): # Attribute names of options for commands that might need to be convinced to # install to the egg build directory + INSTALL_DIRECTORY_ATTRS = [ 'install_lib', 'install_dir', 'install_data', 'install_base' ] @@ -457,7 +458,7 @@ def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, p = path[len(base_dir) + 1:] if not dry_run: z.write(path, p) - log.debug("adding '%s'" % p) + log.debug("adding '%s'", p) compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED if not dry_run: diff --git a/setuptools/command/bdist_wininst.py b/setuptools/command/bdist_wininst.py index 073de97b..8243c917 100755 --- a/setuptools/command/bdist_wininst.py +++ b/setuptools/command/bdist_wininst.py @@ -2,6 +2,7 @@ import distutils.command.bdist_wininst as orig class bdist_wininst(orig.bdist_wininst): + def reinitialize_command(self, command, reinit_subcommands=0): """ Supplement reinitialize_command to work around diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index 1caf8c81..454c91fb 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -1,14 +1,16 @@ +import os +import sys +import itertools +import imp from distutils.command.build_ext import build_ext as _du_build_ext from distutils.file_util import copy_file from distutils.ccompiler import new_compiler -from distutils.sysconfig import 
customize_compiler +from distutils.sysconfig import customize_compiler, get_config_var from distutils.errors import DistutilsError from distutils import log -import os -import sys -import itertools from setuptools.extension import Library +from setuptools.extern import six try: # Attempt to use Cython for building extensions, if available @@ -16,10 +18,8 @@ try: except ImportError: _build_ext = _du_build_ext -from distutils.sysconfig import get_config_var - -get_config_var("LDSHARED") # make sure _config_vars is initialized -del get_config_var +# make sure _config_vars is initialized +get_config_var("LDSHARED") from distutils.sysconfig import _config_vars as _CONFIG_VARS @@ -59,7 +59,18 @@ elif os.name != 'nt': if_dl = lambda s: s if have_rtld else '' + +def get_abi3_suffix(): + """Return the file extension for an abi3-compliant Extension()""" + for suffix, _, _ in (s for s in imp.get_suffixes() if s[2] == imp.C_EXTENSION): + if '.abi3' in suffix: # Unix + return suffix + elif suffix == '.pyd': # Windows + return suffix + + class build_ext(_build_ext): + def run(self): """Build extensions in build directory, then copy if --inplace""" old_inplace, self.inplace = self.inplace, 0 @@ -94,6 +105,15 @@ class build_ext(_build_ext): filename = _build_ext.get_ext_filename(self, fullname) if fullname in self.ext_map: ext = self.ext_map[fullname] + use_abi3 = ( + six.PY3 + and getattr(ext, 'py_limited_api') + and get_abi3_suffix() + ) + if use_abi3: + so_ext = get_config_var('EXT_SUFFIX') + filename = filename[:-len(so_ext)] + filename = filename + get_abi3_suffix() if isinstance(ext, Library): fn, ext = os.path.splitext(filename) return self.shlib_compiler.library_filename(fn, libtype) diff --git a/setuptools/command/build_py.py b/setuptools/command/build_py.py index 0bad8295..b5de9bda 100644 --- a/setuptools/command/build_py.py +++ b/setuptools/command/build_py.py @@ -15,6 +15,7 @@ try: from setuptools.lib2to3_ex import Mixin2to3 except ImportError: class Mixin2to3: + def run_2to3(self, files, doctests=True): "do nothing" diff --git a/setuptools/command/develop.py b/setuptools/command/develop.py index 11b5df10..3eb86120 100755 --- a/setuptools/command/develop.py +++ b/setuptools/command/develop.py @@ -186,6 +186,7 @@ class VersionlessRequirement(object): >>> str(adapted_dist.as_requirement()) 'foo' """ + def __init__(self, dist): self.__dist = dist diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index ccc66cf7..a3792ce2 100755 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -8,7 +8,7 @@ A tool for doing automatic download/extract/build of distutils-based Python packages. For detailed documentation, see the accompanying EasyInstall.txt file, or visit the `EasyInstall home page`__. 
-__ https://pythonhosted.org/setuptools/easy_install.html +__ https://setuptools.readthedocs.io/en/latest/easy_install.html """ @@ -32,7 +32,6 @@ import zipfile import re import stat import random -import platform import textwrap import warnings import site @@ -50,8 +49,9 @@ from setuptools.sandbox import run_setup from setuptools.py31compat import get_path, get_config_vars from setuptools.command import setopt from setuptools.archive_util import unpack_archive -from setuptools.package_index import PackageIndex -from setuptools.package_index import URL_SCHEME +from setuptools.package_index import ( + PackageIndex, parse_requirement_arg, URL_SCHEME, +) from setuptools.command import bdist_egg, egg_info from pkg_resources import ( yield_lines, normalize_path, resource_string, ensure_directory, @@ -432,7 +432,7 @@ class easy_install(Command): """ try: pid = os.getpid() - except: + except Exception: pid = random.randint(0, sys.maxsize) return os.path.join(self.install_dir, "test-easy-install-%s" % pid) @@ -513,7 +513,7 @@ class easy_install(Command): For information on other options, you may wish to consult the documentation at: - https://pythonhosted.org/setuptools/easy_install.html + https://setuptools.readthedocs.io/en/latest/easy_install.html Please make the appropriate changes for your system and try again. """).lstrip() @@ -930,7 +930,7 @@ class easy_install(Command): destination, fix_zipimporter_caches=new_dist_is_zipped, ) - except: + except Exception: update_dist_caches(destination, fix_zipimporter_caches=False) raise @@ -1257,7 +1257,8 @@ class easy_install(Command): * You can set up the installation directory to support ".pth" files by using one of the approaches described here: - https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations + https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations + Please make the appropriate changes for your system and try again.""").lstrip() @@ -1522,15 +1523,6 @@ def get_exe_prefixes(exe_filename): return prefixes -def parse_requirement_arg(spec): - try: - return Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % (spec,) - ) - - class PthDistributions(Environment): """A .pth file with Distribution paths in it""" @@ -1662,7 +1654,7 @@ class RewritePthDistributions(PthDistributions): """) -if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'rewrite') == 'rewrite': +if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite': PthDistributions = RewritePthDistributions @@ -1832,6 +1824,7 @@ def _remove_and_clear_zip_directory_cache_data(normalized_path): normalized_path, zipimport._zip_directory_cache, updater=clear_and_remove_cached_zip_archive_directory_data) + # PyPy Python implementation does not allow directly writing to the # zipimport._zip_directory_cache and so prevents us from attempting to correct # its content. The best we can do there is clear the problematic cache content @@ -1987,10 +1980,20 @@ class CommandSpec(list): return self._render(self + list(self.options)) @staticmethod + def _strip_quotes(item): + _QUOTES = '"\'' + for q in _QUOTES: + if item.startswith(q) and item.endswith(q): + return item[1:-1] + return item + + @staticmethod def _render(items): - cmdline = subprocess.list2cmdline(items) + cmdline = subprocess.list2cmdline( + CommandSpec._strip_quotes(item.strip()) for item in items) return '#!' + cmdline + '\n' + # For pbr compat; will be removed in a future version. 
sys_executable = CommandSpec._sys_executable() @@ -2008,10 +2011,12 @@ class ScriptWriter(object): template = textwrap.dedent(""" # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r __requires__ = %(spec)r + import re import sys from pkg_resources import load_entry_point if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) sys.exit( load_entry_point(%(spec)r, %(group)r, %(name)r)() ) @@ -2159,6 +2164,7 @@ class WindowsScriptWriter(ScriptWriter): class WindowsExecutableLauncherWriter(WindowsScriptWriter): + @classmethod def _get_script_args(cls, type_, name, header, script_text): """ @@ -2203,8 +2209,6 @@ def get_win_launcher(type): Returns the executable as a byte string. """ launcher_fn = '%s.exe' % type - if platform.machine().lower() == 'arm': - launcher_fn = launcher_fn.replace(".", "-arm.") if is_64bit(): launcher_fn = launcher_fn.replace(".", "-64.") else: @@ -2221,39 +2225,7 @@ def load_launcher_manifest(name): def rmtree(path, ignore_errors=False, onerror=auto_chmod): - """Recursively delete a directory tree. - - This code is taken from the Python 2.4 version of 'shutil', because - the 2.3 version doesn't really work right. - """ - if ignore_errors: - def onerror(*args): - pass - elif onerror is None: - def onerror(*args): - raise - names = [] - try: - names = os.listdir(path) - except os.error: - onerror(os.listdir, path, sys.exc_info()) - for name in names: - fullname = os.path.join(path, name) - try: - mode = os.lstat(fullname).st_mode - except os.error: - mode = 0 - if stat.S_ISDIR(mode): - rmtree(fullname, ignore_errors, onerror) - else: - try: - os.remove(fullname) - except os.error: - onerror(os.remove, fullname, sys.exc_info()) - try: - os.rmdir(path) - except os.error: - onerror(os.rmdir, path, sys.exc_info()) + return shutil.rmtree(path, ignore_errors, onerror) def current_umask(): diff --git a/setuptools/command/egg_info.py b/setuptools/command/egg_info.py index 8e1502a5..5183eedc 100755 --- a/setuptools/command/egg_info.py +++ b/setuptools/command/egg_info.py @@ -52,8 +52,10 @@ class egg_info(Command): ] boolean_options = ['tag-date', 'tag-svn-revision'] - negative_opt = {'no-svn-revision': 'tag-svn-revision', - 'no-date': 'tag-date'} + negative_opt = { + 'no-svn-revision': 'tag-svn-revision', + 'no-date': 'tag-date', + } def initialize_options(self): self.egg_name = None @@ -197,6 +199,10 @@ class egg_info(Command): if self.tag_build: version += self.tag_build if self.tag_svn_revision: + warnings.warn( + "tag_svn_revision is deprecated and will not be honored " + "in a future release" + ) version += '-r%s' % self.get_svn_revision() if self.tag_date: version += time.strftime("-%Y%m%d") diff --git a/setuptools/command/install_lib.py b/setuptools/command/install_lib.py index 78fe6891..2b31c3e3 100644 --- a/setuptools/command/install_lib.py +++ b/setuptools/command/install_lib.py @@ -3,6 +3,7 @@ import imp from itertools import product, starmap import distutils.command.install_lib as orig + class install_lib(orig.install_lib): """Don't add compiled flags to filenames of non-Python files""" diff --git a/setuptools/command/install_scripts.py b/setuptools/command/install_scripts.py index be66cb22..16234273 100755 --- a/setuptools/command/install_scripts.py +++ b/setuptools/command/install_scripts.py @@ -1,6 +1,7 @@ from distutils import log import distutils.command.install_scripts as orig import os +import sys from pkg_resources import Distribution, PathMetadata, ensure_directory @@ -37,6 +38,10 @@ class 
install_scripts(orig.install_scripts): if is_wininst: exec_param = "python.exe" writer = ei.WindowsScriptWriter + if exec_param == sys.executable: + # In case the path to the Python executable contains a space, wrap + # it so it's not split up. + exec_param = [exec_param] # resolve the writer to the environment writer = writer.best() cmd = writer.command_spec_class.best().from_param(exec_param) diff --git a/setuptools/command/sdist.py b/setuptools/command/sdist.py index 6640d4e3..1d4f5d54 100755 --- a/setuptools/command/sdist.py +++ b/setuptools/command/sdist.py @@ -4,6 +4,7 @@ import distutils.command.sdist as orig import os import sys import io +import contextlib from setuptools.extern import six @@ -15,6 +16,7 @@ READMES = 'README', 'README.rst', 'README.txt' _default_revctrl = list + def walk_revctrl(dirname=''): """Find all files under revision control""" for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): @@ -64,6 +66,43 @@ class sdist(orig.sdist): if data not in dist_files: dist_files.append(data) + def initialize_options(self): + orig.sdist.initialize_options(self) + + self._default_to_gztar() + + def _default_to_gztar(self): + # only needed on Python prior to 3.6. + if sys.version_info >= (3, 6, 0, 'beta', 1): + return + self.formats = ['gztar'] + + def make_distribution(self): + """ + Workaround for #516 + """ + with self._remove_os_link(): + orig.sdist.make_distribution(self) + + @staticmethod + @contextlib.contextmanager + def _remove_os_link(): + """ + In a context, remove and restore os.link if it exists + """ + class NoValue: + pass + orig_val = getattr(os, 'link', NoValue) + try: + del os.link + except Exception: + pass + try: + yield + finally: + if orig_val is not NoValue: + setattr(os, 'link', orig_val) + def __read_template_hack(self): # This grody hack closes the template file (MANIFEST.in) if an # exception occurs during read_template. @@ -71,7 +110,7 @@ class sdist(orig.sdist): # file. try: orig.sdist.read_template(self) - except: + except Exception: _, _, tb = sys.exc_info() tb.tb_next.tb_frame.f_locals['template'].close() raise @@ -179,7 +218,7 @@ class sdist(orig.sdist): distribution. """ log.info("reading manifest file '%s'", self.manifest) - manifest = open(self.manifest, 'rbU') + manifest = open(self.manifest, 'rb') for line in manifest: # The manifest must contain UTF-8. See #303. 
if six.PY3: diff --git a/setuptools/command/test.py b/setuptools/command/test.py index 39746a02..38bbcd8b 100644 --- a/setuptools/command/test.py +++ b/setuptools/command/test.py @@ -1,10 +1,13 @@ +import os +import operator import sys import contextlib +import itertools from distutils.errors import DistutilsOptionError from unittest import TestLoader from setuptools.extern import six -from setuptools.extern.six.moves import map +from setuptools.extern.six.moves import map, filter from pkg_resources import (resource_listdir, resource_exists, normalize_path, working_set, _namespace_packages, @@ -14,6 +17,7 @@ from setuptools.py31compat import unittest_main class ScanningLoader(TestLoader): + def loadTestsFromModule(self, module, pattern=None): """Return a suite of all tests cases contained in the given module @@ -46,6 +50,7 @@ class ScanningLoader(TestLoader): # adapted from jaraco.classes.properties:NonDataProperty class NonDataProperty(object): + def __init__(self, fget): self.fget = fget @@ -110,7 +115,7 @@ class test(Command): func() @contextlib.contextmanager - def project_on_sys_path(self): + def project_on_sys_path(self, include_dists=[]): with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False) if with_2to3: @@ -142,23 +147,57 @@ class test(Command): old_modules = sys.modules.copy() try: - sys.path.insert(0, normalize_path(ei_cmd.egg_base)) + project_path = normalize_path(ei_cmd.egg_base) + sys.path.insert(0, project_path) working_set.__init__() add_activation_listener(lambda dist: dist.activate()) require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) - yield + with self.paths_on_pythonpath([project_path]): + yield finally: sys.path[:] = old_path sys.modules.clear() sys.modules.update(old_modules) working_set.__init__() + @staticmethod + @contextlib.contextmanager + def paths_on_pythonpath(paths): + """ + Add the indicated paths to the head of the PYTHONPATH environment + variable so that subprocesses will also see the packages at + these paths. + + Do this in a context that restores the value on exit. + """ + nothing = object() + orig_pythonpath = os.environ.get('PYTHONPATH', nothing) + current_pythonpath = os.environ.get('PYTHONPATH', '') + try: + prefix = os.pathsep.join(paths) + to_join = filter(None, [prefix, current_pythonpath]) + new_path = os.pathsep.join(to_join) + if new_path: + os.environ['PYTHONPATH'] = new_path + yield + finally: + if orig_pythonpath is nothing: + os.environ.pop('PYTHONPATH', None) + else: + os.environ['PYTHONPATH'] = orig_pythonpath + + @staticmethod + def install_dists(dist): + """ + Install the requirements indicated by self.distribution and + return an iterable of the dists that were built. 
+ """ + ir_d = dist.fetch_build_eggs(dist.install_requires or []) + tr_d = dist.fetch_build_eggs(dist.tests_require or []) + return itertools.chain(ir_d, tr_d) + def run(self): - if self.distribution.install_requires: - self.distribution.fetch_build_eggs( - self.distribution.install_requires) - if self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) + installed_dists = self.install_dists(self.distribution) cmd = ' '.join(self._argv) if self.dry_run: @@ -166,8 +205,11 @@ class test(Command): return self.announce('running "%s"' % cmd) - with self.project_on_sys_path(): - self.run_tests() + + paths = map(operator.attrgetter('location'), installed_dists) + with self.paths_on_pythonpath(paths): + with self.project_on_sys_path(): + self.run_tests() def run_tests(self): # Purge modules under test from sys.modules. The test loader will diff --git a/setuptools/command/upload_docs.py b/setuptools/command/upload_docs.py index 01b49046..269dc2d5 100644 --- a/setuptools/command/upload_docs.py +++ b/setuptools/command/upload_docs.py @@ -29,6 +29,10 @@ def _encode(s): class upload_docs(upload): + # override the default repository as upload_docs isn't + # supported by Warehouse (and won't be). + DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/' + description = 'Upload documentation to PyPI' user_options = [ @@ -53,6 +57,7 @@ class upload_docs(upload): self.target_dir = None def finalize_options(self): + log.warn("Upload_docs command is deprecated. Use RTD instead.") upload.finalize_options(self) if self.upload_dir is None: if self.has_sphinx(): @@ -105,7 +110,7 @@ class upload_docs(upload): if not isinstance(values, list): values = [values] for value in values: - if type(value) is tuple: + if isinstance(value, tuple): title += '; filename="%s"' % value[0] value = value[1] else: diff --git a/setuptools/depends.py b/setuptools/depends.py index 9f7c9a35..d5a344ad 100644 --- a/setuptools/depends.py +++ b/setuptools/depends.py @@ -1,8 +1,8 @@ import sys import imp import marshal -from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN from distutils.version import StrictVersion +from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN from setuptools.extern import six @@ -10,6 +10,7 @@ __all__ = [ 'Require', 'find_module', 'get_module_constant', 'extract_constant' ] + class Require: """A prerequisite to building or installing a distribution""" @@ -30,7 +31,7 @@ class Require: def full_name(self): """Return full package/distribution name, w/version""" if self.requested_version is not None: - return '%s-%s' % (self.name,self.requested_version) + return '%s-%s' % (self.name, self.requested_version) return self.name def version_ok(self, version): @@ -39,7 +40,6 @@ class Require: str(version) != "unknown" and version >= self.requested_version def get_version(self, paths=None, default="unknown"): - """Get version number of installed module, 'None', or 'default' Search 'paths' for module. If not found, return 'None'. 
If found, @@ -52,8 +52,9 @@ class Require: if self.attribute is None: try: - f,p,i = find_module(self.module,paths) - if f: f.close() + f, p, i = find_module(self.module, paths) + if f: + f.close() return default except ImportError: return None @@ -78,28 +79,27 @@ class Require: def _iter_code(code): - """Yield '(op,arg)' pair for each operation in code object 'code'""" from array import array from dis import HAVE_ARGUMENT, EXTENDED_ARG - bytes = array('b',code.co_code) + bytes = array('b', code.co_code) eof = len(code.co_code) ptr = 0 extended_arg = 0 - while ptr<eof: + while ptr < eof: op = bytes[ptr] - if op>=HAVE_ARGUMENT: + if op >= HAVE_ARGUMENT: - arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg + arg = bytes[ptr + 1] + bytes[ptr + 2] * 256 + extended_arg ptr += 3 - if op==EXTENDED_ARG: + if op == EXTENDED_ARG: long_type = six.integer_types[-1] extended_arg = arg * long_type(65536) continue @@ -108,7 +108,7 @@ def _iter_code(code): arg = None ptr += 1 - yield op,arg + yield op, arg def find_module(module, paths=None): @@ -118,20 +118,19 @@ def find_module(module, paths=None): while parts: part = parts.pop(0) - f, path, (suffix,mode,kind) = info = imp.find_module(part, paths) + f, path, (suffix, mode, kind) = info = imp.find_module(part, paths) - if kind==PKG_DIRECTORY: + if kind == PKG_DIRECTORY: parts = parts or ['__init__'] paths = [path] elif parts: - raise ImportError("Can't find %r in %s" % (parts,module)) + raise ImportError("Can't find %r in %s" % (parts, module)) return info def get_module_constant(module, symbol, default=-1, paths=None): - """Find 'module' by searching 'paths', and extract 'symbol' Return 'None' if 'module' does not exist on 'paths', or it does not define @@ -145,12 +144,12 @@ def get_module_constant(module, symbol, default=-1, paths=None): return None try: - if kind==PY_COMPILED: + if kind == PY_COMPILED: f.read(8) # skip magic & date code = marshal.load(f) - elif kind==PY_FROZEN: + elif kind == PY_FROZEN: code = imp.get_frozen_object(module) - elif kind==PY_SOURCE: + elif kind == PY_SOURCE: code = compile(f.read(), path, 'exec') else: # Not something we can parse; we'll have to import it. 
:( @@ -192,9 +191,9 @@ def extract_constant(code, symbol, default=-1): for op, arg in _iter_code(code): - if op==LOAD_CONST: + if op == LOAD_CONST: const = code.co_consts[arg] - elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL): + elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL): return const else: const = default @@ -214,4 +213,5 @@ def _update_globals(): del globals()[name] __all__.remove(name) + _update_globals() diff --git a/setuptools/dist.py b/setuptools/dist.py index 086e0a58..364f2b4d 100644 --- a/setuptools/dist.py +++ b/setuptools/dist.py @@ -2,16 +2,15 @@ __all__ = ['Distribution'] import re import os -import sys import warnings import numbers import distutils.log import distutils.core import distutils.cmd import distutils.dist -from distutils.core import Distribution as _Distribution from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, DistutilsSetupError) +from distutils.util import rfc822_escape from setuptools.extern import six from setuptools.extern.six.moves import map @@ -19,70 +18,94 @@ from pkg_resources.extern import packaging from setuptools.depends import Require from setuptools import windows_support +from setuptools.monkey import get_unpatched import pkg_resources def _get_unpatched(cls): - """Protect against re-patching the distutils if reloaded + warnings.warn("Do not call this function", DeprecationWarning) + return get_unpatched(cls) - Also ensures that no other distutils extension monkeypatched the distutils - first. - """ - while cls.__module__.startswith('setuptools'): - cls, = cls.__bases__ - if not cls.__module__.startswith('distutils'): - raise AssertionError( - "distutils has already been patched by %r" % cls - ) - return cls -_Distribution = _get_unpatched(_Distribution) - -def _patch_distribution_metadata_write_pkg_info(): +# Based on Python 3.5 version +def write_pkg_file(self, file): + """Write the PKG-INFO format data to a file object. """ - Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local - encoding to save the pkg_info. Monkey-patch its write_pkg_info method to - correct this undesirable behavior. 
+ version = '1.0' + if (self.provides or self.requires or self.obsoletes or + self.classifiers or self.download_url): + version = '1.1' + # Setuptools specific for PEP 345 + if hasattr(self, 'python_requires'): + version = '1.2' + + file.write('Metadata-Version: %s\n' % version) + file.write('Name: %s\n' % self.get_name()) + file.write('Version: %s\n' % self.get_version()) + file.write('Summary: %s\n' % self.get_description()) + file.write('Home-page: %s\n' % self.get_url()) + file.write('Author: %s\n' % self.get_contact()) + file.write('Author-email: %s\n' % self.get_contact_email()) + file.write('License: %s\n' % self.get_license()) + if self.download_url: + file.write('Download-URL: %s\n' % self.download_url) + + long_desc = rfc822_escape(self.get_long_description()) + file.write('Description: %s\n' % long_desc) + + keywords = ','.join(self.get_keywords()) + if keywords: + file.write('Keywords: %s\n' % keywords) + + self._write_list(file, 'Platform', self.get_platforms()) + self._write_list(file, 'Classifier', self.get_classifiers()) + + # PEP 314 + self._write_list(file, 'Requires', self.get_requires()) + self._write_list(file, 'Provides', self.get_provides()) + self._write_list(file, 'Obsoletes', self.get_obsoletes()) + + # Setuptools specific for PEP 345 + if hasattr(self, 'python_requires'): + file.write('Requires-Python: %s\n' % self.python_requires) + + +# from Python 3.4 +def write_pkg_info(self, base_dir): + """Write the PKG-INFO file into the release tree. """ - environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2) - if not environment_local: - return - - # from Python 3.4 - def write_pkg_info(self, base_dir): - """Write the PKG-INFO file into the release tree. - """ - with open(os.path.join(base_dir, 'PKG-INFO'), 'w', - encoding='UTF-8') as pkg_info: - self.write_pkg_file(pkg_info) + with open(os.path.join(base_dir, 'PKG-INFO'), 'w', + encoding='UTF-8') as pkg_info: + self.write_pkg_file(pkg_info) - distutils.dist.DistributionMetadata.write_pkg_info = write_pkg_info -_patch_distribution_metadata_write_pkg_info() sequence = tuple, list + def check_importable(dist, attr, value): try: - ep = pkg_resources.EntryPoint.parse('x='+value) + ep = pkg_resources.EntryPoint.parse('x=' + value) assert not ep.extras - except (TypeError,ValueError,AttributeError,AssertionError): + except (TypeError, ValueError, AttributeError, AssertionError): raise DistutilsSetupError( "%r must be importable 'module:attrs' string (got %r)" - % (attr,value) + % (attr, value) ) def assert_string_list(dist, attr, value): """Verify that value is a string list or None""" try: - assert ''.join(value)!=value - except (TypeError,ValueError,AttributeError,AssertionError): + assert ''.join(value) != value + except (TypeError, ValueError, AttributeError, AssertionError): raise DistutilsSetupError( - "%r must be a list of strings (got %r)" % (attr,value) + "%r must be a list of strings (got %r)" % (attr, value) ) + + def check_nsp(dist, attr, value): """Verify that namespace packages are valid""" - assert_string_list(dist,attr,value) + assert_string_list(dist, attr, value) for nsp in value: if not dist.has_contents_for(nsp): raise DistutilsSetupError( @@ -97,22 +120,24 @@ def check_nsp(dist, attr, value): " is not: please correct this in setup.py", nsp, parent ) + def check_extras(dist, attr, value): """Verify that extras_require mapping is valid""" try: - for k,v in value.items(): + for k, v in value.items(): if ':' in k: - k,m = k.split(':',1) + k, m = k.split(':', 1) if pkg_resources.invalid_marker(m): - 
raise DistutilsSetupError("Invalid environment marker: "+m) + raise DistutilsSetupError("Invalid environment marker: " + m) list(pkg_resources.parse_requirements(v)) - except (TypeError,ValueError,AttributeError): + except (TypeError, ValueError, AttributeError): raise DistutilsSetupError( "'extras_require' must be a dictionary whose values are " "strings or lists of strings containing valid project/version " "requirement specifiers." ) + def assert_bool(dist, attr, value): """Verify that value is True, False, 0, or 1""" if bool(value) != value: @@ -131,6 +156,19 @@ def check_requirements(dist, attr, value): ) raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) + +def check_specifier(dist, attr, value): + """Verify that value is a valid version specifier""" + try: + packaging.specifiers.SpecifierSet(value) + except packaging.specifiers.InvalidSpecifier as error: + tmpl = ( + "{attr!r} must be a string or list of strings " + "containing valid version specifiers; {error}" + ) + raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) + + def check_entry_points(dist, attr, value): """Verify that entry_points map is parseable""" try: @@ -138,25 +176,30 @@ def check_entry_points(dist, attr, value): except ValueError as e: raise DistutilsSetupError(e) + def check_test_suite(dist, attr, value): if not isinstance(value, six.string_types): raise DistutilsSetupError("test_suite must be a string") + def check_package_data(dist, attr, value): """Verify that value is a dictionary of package names to glob lists""" - if isinstance(value,dict): - for k,v in value.items(): - if not isinstance(k,str): break - try: iter(v) + if isinstance(value, dict): + for k, v in value.items(): + if not isinstance(k, str): + break + try: + iter(v) except TypeError: break else: return raise DistutilsSetupError( - attr+" must be a dictionary mapping package names to lists of " + attr + " must be a dictionary mapping package names to lists of " "wildcard patterns" ) + def check_packages(dist, attr, value): for pkgname in value: if not re.match(r'\w+(\.\w+)*', pkgname): @@ -166,6 +209,9 @@ def check_packages(dist, attr, value): ) +_Distribution = get_unpatched(distutils.core.Distribution) + + class Distribution(_Distribution): """Distribution with support for features, tests, and package data @@ -264,12 +310,12 @@ class Distribution(_Distribution): # Make sure we have any eggs needed to interpret 'attrs' if attrs is not None: self.dependency_links = attrs.pop('dependency_links', []) - assert_string_list(self,'dependency_links',self.dependency_links) + assert_string_list(self, 'dependency_links', self.dependency_links) if attrs and 'setup_requires' in attrs: self.fetch_build_eggs(attrs['setup_requires']) for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): vars(self).setdefault(ep.name, None) - _Distribution.__init__(self,attrs) + _Distribution.__init__(self, attrs) if isinstance(self.metadata.version, numbers.Number): # Some people apparently take "version number" too literally :) self.metadata.version = str(self.metadata.version) @@ -293,6 +339,8 @@ class Distribution(_Distribution): "setuptools, pip, and PyPI. Please see PEP 440 for more " "details." 
% self.metadata.version ) + if getattr(self, 'python_requires', None): + self.metadata.python_requires = self.python_requires def parse_command_line(self): """Process features after parsing command line options""" @@ -301,9 +349,9 @@ class Distribution(_Distribution): self._finalize_features() return result - def _feature_attrname(self,name): + def _feature_attrname(self, name): """Convert feature name to corresponding option attribute name""" - return 'with_'+name.replace('-','_') + return 'with_' + name.replace('-', '_') def fetch_build_eggs(self, requires): """Resolve pre-setup requirements""" @@ -314,6 +362,7 @@ class Distribution(_Distribution): ) for dist in resolved_dists: pkg_resources.working_set.add(dist, replace=True) + return resolved_dists def finalize_options(self): _Distribution.finalize_options(self) @@ -321,7 +370,7 @@ class Distribution(_Distribution): self._set_global_opts_from_features() for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - value = getattr(self,ep.name,None) + value = getattr(self, ep.name, None) if value is not None: ep.require(installer=self.fetch_build_egg) ep.load()(self, ep.name, value) @@ -354,7 +403,7 @@ class Distribution(_Distribution): cmd.package_index.to_scan = [] except AttributeError: from setuptools.command.easy_install import easy_install - dist = self.__class__({'script_args':['easy_install']}) + dist = self.__class__({'script_args': ['easy_install']}) dist.parse_config_files() opts = dist.get_option_dict('easy_install') keep = ( @@ -385,20 +434,20 @@ class Distribution(_Distribution): go = [] no = self.negative_opt.copy() - for name,feature in self.features.items(): - self._set_feature(name,None) + for name, feature in self.features.items(): + self._set_feature(name, None) feature.validate(self) if feature.optional: descr = feature.description incdef = ' (default)' - excdef='' + excdef = '' if not feature.include_by_default(): excdef, incdef = incdef, excdef - go.append(('with-'+name, None, 'include '+descr+incdef)) - go.append(('without-'+name, None, 'exclude '+descr+excdef)) - no['without-'+name] = 'with-'+name + go.append(('with-' + name, None, 'include ' + descr + incdef)) + go.append(('without-' + name, None, 'exclude ' + descr + excdef)) + no['without-' + name] = 'with-' + name self.global_options = self.feature_options = go + self.global_options self.negative_opt = self.feature_negopt = no @@ -407,25 +456,25 @@ class Distribution(_Distribution): """Add/remove features and resolve dependencies between them""" # First, flag all the enabled items (and thus their dependencies) - for name,feature in self.features.items(): + for name, feature in self.features.items(): enabled = self.feature_is_included(name) if enabled or (enabled is None and feature.include_by_default()): feature.include_in(self) - self._set_feature(name,1) + self._set_feature(name, 1) # Then disable the rest, so that off-by-default features don't # get flagged as errors when they're required by an enabled feature - for name,feature in self.features.items(): + for name, feature in self.features.items(): if not self.feature_is_included(name): feature.exclude_from(self) - self._set_feature(name,0) + self._set_feature(name, 0) def get_command_class(self, command): """Pluggable version of get_command_class()""" if command in self.cmdclass: return self.cmdclass[command] - for ep in pkg_resources.iter_entry_points('distutils.commands',command): + for ep in pkg_resources.iter_entry_points('distutils.commands', command): 
ep.require(installer=self.fetch_build_egg) self.cmdclass[command] = cmdclass = ep.load() return cmdclass @@ -448,26 +497,26 @@ class Distribution(_Distribution): self.cmdclass[ep.name] = cmdclass return _Distribution.get_command_list(self) - def _set_feature(self,name,status): + def _set_feature(self, name, status): """Set feature's inclusion status""" - setattr(self,self._feature_attrname(name),status) + setattr(self, self._feature_attrname(name), status) - def feature_is_included(self,name): + def feature_is_included(self, name): """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" - return getattr(self,self._feature_attrname(name)) + return getattr(self, self._feature_attrname(name)) - def include_feature(self,name): + def include_feature(self, name): """Request inclusion of feature named 'name'""" - if self.feature_is_included(name)==0: + if self.feature_is_included(name) == 0: descr = self.features[name].description raise DistutilsOptionError( descr + " is required, but was excluded or is not available" ) self.features[name].include_in(self) - self._set_feature(name,1) + self._set_feature(name, 1) - def include(self,**attrs): + def include(self, **attrs): """Add items to distribution that are named in keyword arguments For example, 'dist.exclude(py_modules=["x"])' would add 'x' to @@ -482,86 +531,86 @@ class Distribution(_Distribution): will try to call 'dist._include_foo({"bar":"baz"})', which can then handle whatever special inclusion logic is needed. """ - for k,v in attrs.items(): - include = getattr(self, '_include_'+k, None) + for k, v in attrs.items(): + include = getattr(self, '_include_' + k, None) if include: include(v) else: - self._include_misc(k,v) + self._include_misc(k, v) - def exclude_package(self,package): + def exclude_package(self, package): """Remove packages, modules, and extensions in named package""" - pfx = package+'.' + pfx = package + '.' if self.packages: self.packages = [ p for p in self.packages - if p != package and not p.startswith(pfx) + if p != package and not p.startswith(pfx) ] if self.py_modules: self.py_modules = [ p for p in self.py_modules - if p != package and not p.startswith(pfx) + if p != package and not p.startswith(pfx) ] if self.ext_modules: self.ext_modules = [ p for p in self.ext_modules - if p.name != package and not p.name.startswith(pfx) + if p.name != package and not p.name.startswith(pfx) ] - def has_contents_for(self,package): + def has_contents_for(self, package): """Return true if 'exclude_package(package)' would do something""" - pfx = package+'.' + pfx = package + '.' 
for p in self.iter_distribution_names(): - if p==package or p.startswith(pfx): + if p == package or p.startswith(pfx): return True - def _exclude_misc(self,name,value): + def _exclude_misc(self, name, value): """Handle 'exclude()' for list/tuple attrs without a special handler""" - if not isinstance(value,sequence): + if not isinstance(value, sequence): raise DistutilsSetupError( "%s: setting must be a list or tuple (%r)" % (name, value) ) try: - old = getattr(self,name) + old = getattr(self, name) except AttributeError: raise DistutilsSetupError( "%s: No such distribution setting" % name ) - if old is not None and not isinstance(old,sequence): + if old is not None and not isinstance(old, sequence): raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" + name + ": this setting cannot be changed via include/exclude" ) elif old: - setattr(self,name,[item for item in old if item not in value]) + setattr(self, name, [item for item in old if item not in value]) - def _include_misc(self,name,value): + def _include_misc(self, name, value): """Handle 'include()' for list/tuple attrs without a special handler""" - if not isinstance(value,sequence): + if not isinstance(value, sequence): raise DistutilsSetupError( "%s: setting must be a list (%r)" % (name, value) ) try: - old = getattr(self,name) + old = getattr(self, name) except AttributeError: raise DistutilsSetupError( "%s: No such distribution setting" % name ) if old is None: - setattr(self,name,value) - elif not isinstance(old,sequence): + setattr(self, name, value) + elif not isinstance(old, sequence): raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" + name + ": this setting cannot be changed via include/exclude" ) else: - setattr(self,name,old+[item for item in value if item not in old]) + setattr(self, name, old + [item for item in value if item not in old]) - def exclude(self,**attrs): + def exclude(self, **attrs): """Remove items from distribution that are named in keyword arguments For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from @@ -577,15 +626,15 @@ class Distribution(_Distribution): will try to call 'dist._exclude_foo({"bar":"baz"})', which can then handle whatever special exclusion logic is needed. """ - for k,v in attrs.items(): - exclude = getattr(self, '_exclude_'+k, None) + for k, v in attrs.items(): + exclude = getattr(self, '_exclude_' + k, None) if exclude: exclude(v) else: - self._exclude_misc(k,v) + self._exclude_misc(k, v) - def _exclude_packages(self,packages): - if not isinstance(packages,sequence): + def _exclude_packages(self, packages): + if not isinstance(packages, sequence): raise DistutilsSetupError( "packages: setting must be a list or tuple (%r)" % (packages,) ) @@ -600,17 +649,17 @@ class Distribution(_Distribution): command = args[0] aliases = self.get_option_dict('aliases') while command in aliases: - src,alias = aliases[command] + src, alias = aliases[command] del aliases[command] # ensure each alias can expand only once! 
import shlex - args[:1] = shlex.split(alias,True) + args[:1] = shlex.split(alias, True) command = args[0] nargs = _Distribution._parse_command_opts(self, parser, args) # Handle commands that want to consume all remaining arguments cmd_class = self.get_command_class(command) - if getattr(cmd_class,'command_consumes_arguments',None): + if getattr(cmd_class, 'command_consumes_arguments', None): self.get_option_dict(command)['args'] = ("command line", nargs) if nargs is not None: return [] @@ -629,31 +678,31 @@ class Distribution(_Distribution): d = {} - for cmd,opts in self.command_options.items(): + for cmd, opts in self.command_options.items(): - for opt,(src,val) in opts.items(): + for opt, (src, val) in opts.items(): if src != "command line": continue - opt = opt.replace('_','-') + opt = opt.replace('_', '-') - if val==0: + if val == 0: cmdobj = self.get_command_obj(cmd) neg_opt = self.negative_opt.copy() - neg_opt.update(getattr(cmdobj,'negative_opt',{})) - for neg,pos in neg_opt.items(): - if pos==opt: - opt=neg - val=None + neg_opt.update(getattr(cmdobj, 'negative_opt', {})) + for neg, pos in neg_opt.items(): + if pos == opt: + opt = neg + val = None break else: raise AssertionError("Shouldn't be able to get here") - elif val==1: + elif val == 1: val = None - d.setdefault(cmd,{})[opt] = val + d.setdefault(cmd, {})[opt] = val return d @@ -667,7 +716,7 @@ class Distribution(_Distribution): yield module for ext in self.ext_modules or (): - if isinstance(ext,tuple): + if isinstance(ext, tuple): name, buildinfo = ext else: name = ext.name @@ -711,11 +760,6 @@ class Distribution(_Distribution): sys.stdout.detach(), encoding, errors, newline, line_buffering) -# Install it throughout the distutils -for module in distutils.dist, distutils.core, distutils.cmd: - module.Distribution = Distribution - - class Feature: """ **deprecated** -- The `Feature` facility was never completely implemented @@ -790,16 +834,17 @@ class Feature: self.standard = standard self.available = available self.optional = optional - if isinstance(require_features,(str,Require)): + if isinstance(require_features, (str, Require)): require_features = require_features, self.require_features = [ - r for r in require_features if isinstance(r,str) + r for r in require_features if isinstance(r, str) ] - er = [r for r in require_features if not isinstance(r,str)] - if er: extras['require_features'] = er + er = [r for r in require_features if not isinstance(r, str)] + if er: + extras['require_features'] = er - if isinstance(remove,str): + if isinstance(remove, str): remove = remove, self.remove = remove self.extras = extras @@ -814,8 +859,7 @@ class Feature: """Should this feature be included by default?""" return self.available and self.standard - def include_in(self,dist): - + def include_in(self, dist): """Ensure feature and its requirements are included in distribution You may override this in a subclass to perform additional operations on @@ -826,7 +870,7 @@ class Feature: if not self.available: raise DistutilsPlatformError( - self.description+" is required, " + self.description + " is required, " "but is not available on this platform" ) @@ -835,8 +879,7 @@ class Feature: for f in self.require_features: dist.include_feature(f) - def exclude_from(self,dist): - + def exclude_from(self, dist): """Ensure feature is excluded from distribution You may override this in a subclass to perform additional operations on @@ -851,8 +894,7 @@ class Feature: for item in self.remove: dist.exclude_package(item) - def validate(self,dist): - + 
def validate(self, dist): """Verify that feature makes sense in context of distribution This method is called by the distribution just before it parses its diff --git a/setuptools/extension.py b/setuptools/extension.py index d10609b6..03068d35 100644 --- a/setuptools/extension.py +++ b/setuptools/extension.py @@ -1,4 +1,3 @@ -import sys import re import functools import distutils.core @@ -7,18 +6,14 @@ import distutils.extension from setuptools.extern.six.moves import map -from .dist import _get_unpatched -from . import msvc9_support +from .monkey import get_unpatched -_Extension = _get_unpatched(distutils.core.Extension) - -msvc9_support.patch_for_specialized_compiler() def _have_cython(): """ Return True if Cython can be imported. """ - cython_impl = 'Cython.Distutils.build_ext', + cython_impl = 'Cython.Distutils.build_ext' try: # from (cython_impl) import build_ext __import__(cython_impl, fromlist=['build_ext']).build_ext @@ -27,13 +22,23 @@ def _have_cython(): pass return False + # for compatibility have_pyrex = _have_cython +_Extension = get_unpatched(distutils.core.Extension) + + class Extension(_Extension): """Extension that uses '.c' files in place of '.pyx' files""" + def __init__(self, name, sources, *args, **kw): + # The *args is needed for compatibility as calls may use positional + # arguments. py_limited_api may be set only via keyword. + self.py_limited_api = kw.pop("py_limited_api", False) + _Extension.__init__(self, name, sources, *args, **kw) + def _convert_pyx_sources_to_lang(self): """ Replace sources with .pyx extensions to sources with the target @@ -48,10 +53,6 @@ class Extension(_Extension): sub = functools.partial(re.sub, '.pyx$', target_ext) self.sources = list(map(sub, self.sources)) + class Library(Extension): """Just like a regular Extension, but built as a library instead""" - -distutils.core.Extension = Extension -distutils.extension.Extension = Extension -if 'distutils.command.build_ext' in sys.modules: - sys.modules['distutils.command.build_ext'].Extension = Extension diff --git a/setuptools/gui-arm-32.exe b/setuptools/gui-arm-32.exe Binary files differdeleted file mode 100644 index 537aff37..00000000 --- a/setuptools/gui-arm-32.exe +++ /dev/null diff --git a/setuptools/launch.py b/setuptools/launch.py index b05cbd2c..308283ea 100644 --- a/setuptools/launch.py +++ b/setuptools/launch.py @@ -18,9 +18,9 @@ def run(): __builtins__ script_name = sys.argv[1] namespace = dict( - __file__ = script_name, - __name__ = '__main__', - __doc__ = None, + __file__=script_name, + __name__='__main__', + __doc__=None, ) sys.argv[:] = sys.argv[1:] diff --git a/setuptools/lib2to3_ex.py b/setuptools/lib2to3_ex.py index feef591a..c8632bc5 100644 --- a/setuptools/lib2to3_ex.py +++ b/setuptools/lib2to3_ex.py @@ -10,9 +10,12 @@ This module raises an ImportError on Python 2. from distutils.util import Mixin2to3 as _Mixin2to3 from distutils import log from lib2to3.refactor import RefactoringTool, get_fixers_from_package + import setuptools + class DistutilsRefactoringTool(RefactoringTool): + def log_error(self, msg, *args, **kw): log.error(msg, *args) @@ -22,15 +25,17 @@ class DistutilsRefactoringTool(RefactoringTool): def log_debug(self, msg, *args): log.debug(msg, *args) + class Mixin2to3(_Mixin2to3): - def run_2to3(self, files, doctests = False): + + def run_2to3(self, files, doctests=False): # See of the distribution option has been set, otherwise check the # setuptools default. 
if self.distribution.use_2to3 is not True: return if not files: return - log.info("Fixing "+" ".join(files)) + log.info("Fixing " + " ".join(files)) self.__build_fixer_names() self.__exclude_fixers() if doctests: @@ -41,7 +46,8 @@ class Mixin2to3(_Mixin2to3): _Mixin2to3.run_2to3(self, files) def __build_fixer_names(self): - if self.fixer_names: return + if self.fixer_names: + return self.fixer_names = [] for p in setuptools.lib2to3_fixer_packages: self.fixer_names.extend(get_fixers_from_package(p)) diff --git a/setuptools/monkey.py b/setuptools/monkey.py new file mode 100644 index 00000000..43b97b4d --- /dev/null +++ b/setuptools/monkey.py @@ -0,0 +1,187 @@ +""" +Monkey patching of distutils. +""" + +import sys +import distutils.filelist +import platform +import types +import functools + +from .py26compat import import_module +from setuptools.extern import six + +import setuptools + + +__all__ = [] +""" +Everything is private. Contact the project team +if you think you need this functionality. +""" + + +def get_unpatched(item): + lookup = ( + get_unpatched_class if isinstance(item, six.class_types) else + get_unpatched_function if isinstance(item, types.FunctionType) else + lambda item: None + ) + return lookup(item) + + +def get_unpatched_class(cls): + """Protect against re-patching the distutils if reloaded + + Also ensures that no other distutils extension monkeypatched the distutils + first. + """ + while cls.__module__.startswith('setuptools'): + cls, = cls.__bases__ + if not cls.__module__.startswith('distutils'): + msg = "distutils has already been patched by %r" % cls + raise AssertionError(msg) + return cls + + +def patch_all(): + # we can't patch distutils.cmd, alas + distutils.core.Command = setuptools.Command + + has_issue_12885 = ( + sys.version_info < (3, 4, 6) + or + (3, 5) < sys.version_info <= (3, 5, 3) + or + (3, 6) < sys.version_info + ) + + if has_issue_12885: + # fix findall bug in distutils (http://bugs.python.org/issue12885) + distutils.filelist.findall = setuptools.findall + + needs_warehouse = ( + sys.version_info < (2, 7, 13) + or + (3, 0) < sys.version_info < (3, 3, 7) + or + (3, 4) < sys.version_info < (3, 4, 6) + or + (3, 5) < sys.version_info <= (3, 5, 3) + or + (3, 6) < sys.version_info + ) + + if needs_warehouse: + warehouse = 'https://upload.pypi.org/legacy/' + distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse + + _patch_distribution_metadata_write_pkg_file() + _patch_distribution_metadata_write_pkg_info() + + # Install Distribution throughout the distutils + for module in distutils.dist, distutils.core, distutils.cmd: + module.Distribution = setuptools.dist.Distribution + + # Install the patched Extension + distutils.core.Extension = setuptools.extension.Extension + distutils.extension.Extension = setuptools.extension.Extension + if 'distutils.command.build_ext' in sys.modules: + sys.modules['distutils.command.build_ext'].Extension = ( + setuptools.extension.Extension + ) + + patch_for_msvc_specialized_compiler() + + +def _patch_distribution_metadata_write_pkg_file(): + """Patch write_pkg_file to also write Requires-Python/Requires-External""" + distutils.dist.DistributionMetadata.write_pkg_file = ( + setuptools.dist.write_pkg_file + ) + + +def _patch_distribution_metadata_write_pkg_info(): + """ + Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local + encoding to save the pkg_info. Monkey-patch its write_pkg_info method to + correct this undesirable behavior. 
+ """ + environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2) + if not environment_local: + return + + distutils.dist.DistributionMetadata.write_pkg_info = ( + setuptools.dist.write_pkg_info + ) + + +def patch_func(replacement, target_mod, func_name): + """ + Patch func_name in target_mod with replacement + + Important - original must be resolved by name to avoid + patching an already patched function. + """ + original = getattr(target_mod, func_name) + + # set the 'unpatched' attribute on the replacement to + # point to the original. + vars(replacement).setdefault('unpatched', original) + + # replace the function in the original module + setattr(target_mod, func_name, replacement) + + +def get_unpatched_function(candidate): + return getattr(candidate, 'unpatched') + + +def patch_for_msvc_specialized_compiler(): + """ + Patch functions in distutils to use standalone Microsoft Visual C++ + compilers. + """ + # import late to avoid circular imports on Python < 3.5 + msvc = import_module('setuptools.msvc') + + if platform.system() != 'Windows': + # Compilers only availables on Microsoft Windows + return + + def patch_params(mod_name, func_name): + """ + Prepare the parameters for patch_func to patch indicated function. + """ + repl_prefix = 'msvc9_' if 'msvc9' in mod_name else 'msvc14_' + repl_name = repl_prefix + func_name.lstrip('_') + repl = getattr(msvc, repl_name) + mod = import_module(mod_name) + if not hasattr(mod, func_name): + raise ImportError(func_name) + return repl, mod, func_name + + # Python 2.7 to 3.4 + msvc9 = functools.partial(patch_params, 'distutils.msvc9compiler') + + # Python 3.5+ + msvc14 = functools.partial(patch_params, 'distutils._msvccompiler') + + try: + # Patch distutils.msvc9compiler + patch_func(*msvc9('find_vcvarsall')) + patch_func(*msvc9('query_vcvarsall')) + except ImportError: + pass + + try: + # Patch distutils._msvccompiler._get_vc_env + patch_func(*msvc14('_get_vc_env')) + except ImportError: + pass + + try: + # Patch distutils._msvccompiler.gen_lib_options for Numpy + patch_func(*msvc14('gen_lib_options')) + except ImportError: + pass diff --git a/setuptools/msvc.py b/setuptools/msvc.py new file mode 100644 index 00000000..e9665e10 --- /dev/null +++ b/setuptools/msvc.py @@ -0,0 +1,1189 @@ +""" +Improved support for Microsoft Visual C++ compilers. + +Known supported compilers: +-------------------------- +Microsoft Visual C++ 9.0: + Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64); + Microsoft Windows SDK 7.0 (x86, x64, ia64); + Microsoft Windows SDK 6.1 (x86, x64, ia64) + +Microsoft Visual C++ 10.0: + Microsoft Windows SDK 7.1 (x86, x64, ia64) + +Microsoft Visual C++ 14.0: + Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) +""" + +import os +import sys +import platform +import itertools +import distutils.errors +from pkg_resources.extern.packaging.version import LegacyVersion + +from setuptools.extern.six.moves import filterfalse + +from .monkey import get_unpatched + +if platform.system() == 'Windows': + from setuptools.extern.six.moves import winreg + safe_env = os.environ +else: + """ + Mock winreg and environ so the module can be imported + on this platform. 
+ """ + class winreg: + HKEY_USERS = None + HKEY_CURRENT_USER = None + HKEY_LOCAL_MACHINE = None + HKEY_CLASSES_ROOT = None + safe_env = dict() + +try: + from distutils.msvc9compiler import Reg +except ImportError: + pass + + +def msvc9_find_vcvarsall(version): + """ + Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone + compiler build for Python (VCForPython). Fall back to original behavior + when the standalone compiler is not available. + + Redirect the path of "vcvarsall.bat". + + Known supported compilers + ------------------------- + Microsoft Visual C++ 9.0: + Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) + + Parameters + ---------- + version: float + Required Microsoft Visual C++ version. + + Return + ------ + vcvarsall.bat path: str + """ + VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' + key = VC_BASE % ('', version) + try: + # Per-user installs register the compiler path here + productdir = Reg.get_value(key, "installdir") + except KeyError: + try: + # All-user installs on a 64-bit system register here + key = VC_BASE % ('Wow6432Node\\', version) + productdir = Reg.get_value(key, "installdir") + except KeyError: + productdir = None + + if productdir: + vcvarsall = os.path.os.path.join(productdir, "vcvarsall.bat") + if os.path.isfile(vcvarsall): + return vcvarsall + + return get_unpatched(msvc9_find_vcvarsall)(version) + + +def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs): + """ + Patched "distutils.msvc9compiler.query_vcvarsall" for support standalones + compilers. + + Set environment without use of "vcvarsall.bat". + + Known supported compilers + ------------------------- + Microsoft Visual C++ 9.0: + Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64); + Microsoft Windows SDK 7.0 (x86, x64, ia64); + Microsoft Windows SDK 6.1 (x86, x64, ia64) + + Microsoft Visual C++ 10.0: + Microsoft Windows SDK 7.1 (x86, x64, ia64) + + Parameters + ---------- + ver: float + Required Microsoft Visual C++ version. + arch: str + Target architecture. + + Return + ------ + environment: dict + """ + # Try to get environement from vcvarsall.bat (Classical way) + try: + orig = get_unpatched(msvc9_query_vcvarsall) + return orig(ver, arch, *args, **kwargs) + except distutils.errors.DistutilsPlatformError: + # Pass error if Vcvarsall.bat is missing + pass + except ValueError: + # Pass error if environment not set after executing vcvarsall.bat + pass + + # If error, try to set environment directly + try: + return EnvironmentInfo(arch, ver).return_env() + except distutils.errors.DistutilsPlatformError as exc: + _augment_exception(exc, ver, arch) + raise + + +def msvc14_get_vc_env(plat_spec): + """ + Patched "distutils._msvccompiler._get_vc_env" for support standalones + compilers. + + Set environment without use of "vcvarsall.bat". + + Known supported compilers + ------------------------- + Microsoft Visual C++ 14.0: + Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) + + Parameters + ---------- + plat_spec: str + Target architecture. 
+ + Return + ------ + environment: dict + """ + # Try to get environment from vcvarsall.bat (Classical way) + try: + return get_unpatched(msvc14_get_vc_env)(plat_spec) + except distutils.errors.DistutilsPlatformError: + # Pass error Vcvarsall.bat is missing + pass + + # If error, try to set environment directly + try: + return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env() + except distutils.errors.DistutilsPlatformError as exc: + _augment_exception(exc, 14.0) + raise + + +def msvc14_gen_lib_options(*args, **kwargs): + """ + Patched "distutils._msvccompiler.gen_lib_options" for fix + compatibility between "numpy.distutils" and "distutils._msvccompiler" + (for Numpy < 1.11.2) + """ + if "numpy.distutils" in sys.modules: + import numpy as np + if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'): + return np.distutils.ccompiler.gen_lib_options(*args, **kwargs) + return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs) + + +def _augment_exception(exc, version, arch=''): + """ + Add details to the exception message to help guide the user + as to what action will resolve it. + """ + # Error if MSVC++ directory not found or environment not set + message = exc.args[0] + + if "vcvarsall" in message.lower() or "visual c" in message.lower(): + # Special error message if MSVC++ not installed + tmpl = 'Microsoft Visual C++ {version:0.1f} is required.' + message = tmpl.format(**locals()) + msdownload = 'www.microsoft.com/download/details.aspx?id=%d' + if version == 9.0: + if arch.lower().find('ia64') > -1: + # For VC++ 9.0, if IA64 support is needed, redirect user + # to Windows SDK 7.0 + message += ' Get it with "Microsoft Windows SDK 7.0": ' + message += msdownload % 3138 + else: + # For VC++ 9.0 redirect user to Vc++ for Python 2.7 : + # This redirection link is maintained by Microsoft. + # Contact vspython@microsoft.com if it needs updating. + message += ' Get it from http://aka.ms/vcpython27' + elif version == 10.0: + # For VC++ 10.0 Redirect user to Windows SDK 7.1 + message += ' Get it with "Microsoft Windows SDK 7.1": ' + message += msdownload % 8279 + elif version >= 14.0: + # For VC++ 14.0 Redirect user to Visual C++ Build Tools + message += (' Get it with "Microsoft Visual C++ Build Tools": ' + r'http://landinghub.visualstudio.com/' + 'visual-cpp-build-tools') + + exc.args = (message, ) + + +class PlatformInfo: + """ + Current and Target Architectures informations. + + Parameters + ---------- + arch: str + Target architecture. + """ + current_cpu = safe_env.get('processor_architecture', '').lower() + + def __init__(self, arch): + self.arch = arch.lower().replace('x64', 'amd64') + + @property + def target_cpu(self): + return self.arch[self.arch.find('_') + 1:] + + def target_is_x86(self): + return self.target_cpu == 'x86' + + def current_is_x86(self): + return self.current_cpu == 'x86' + + def current_dir(self, hidex86=False, x64=False): + """ + Current platform specific subfolder. + + Parameters + ---------- + hidex86: bool + return '' and not '\x86' if architecture is x86. + x64: bool + return '\x64' and not '\amd64' if architecture is amd64. + + Return + ------ + subfolder: str + '\target', or '' (see hidex86 parameter) + """ + return ( + '' if (self.current_cpu == 'x86' and hidex86) else + r'\x64' if (self.current_cpu == 'amd64' and x64) else + r'\%s' % self.current_cpu + ) + + def target_dir(self, hidex86=False, x64=False): + """ + Target platform specific subfolder. + + Parameters + ---------- + hidex86: bool + return '' and not '\x86' if architecture is x86. 
+ x64: bool + return '\x64' and not '\amd64' if architecture is amd64. + + Return + ------ + subfolder: str + '\current', or '' (see hidex86 parameter) + """ + return ( + '' if (self.target_cpu == 'x86' and hidex86) else + r'\x64' if (self.target_cpu == 'amd64' and x64) else + r'\%s' % self.target_cpu + ) + + def cross_dir(self, forcex86=False): + """ + Cross platform specific subfolder. + + Parameters + ---------- + forcex86: bool + Use 'x86' as current architecture even if current acritecture is + not x86. + + Return + ------ + subfolder: str + '' if target architecture is current architecture, + '\current_target' if not. + """ + current = 'x86' if forcex86 else self.current_cpu + return ( + '' if self.target_cpu == current else + self.target_dir().replace('\\', '\\%s_' % current) + ) + + +class RegistryInfo: + """ + Microsoft Visual Studio related registry informations. + + Parameters + ---------- + platform_info: PlatformInfo + "PlatformInfo" instance. + """ + HKEYS = (winreg.HKEY_USERS, + winreg.HKEY_CURRENT_USER, + winreg.HKEY_LOCAL_MACHINE, + winreg.HKEY_CLASSES_ROOT) + + def __init__(self, platform_info): + self.pi = platform_info + + @property + def visualstudio(self): + """ + Microsoft Visual Studio root registry key. + """ + return 'VisualStudio' + + @property + def sxs(self): + """ + Microsoft Visual Studio SxS registry key. + """ + return os.path.join(self.visualstudio, 'SxS') + + @property + def vc(self): + """ + Microsoft Visual C++ VC7 registry key. + """ + return os.path.join(self.sxs, 'VC7') + + @property + def vs(self): + """ + Microsoft Visual Studio VS7 registry key. + """ + return os.path.join(self.sxs, 'VS7') + + @property + def vc_for_python(self): + """ + Microsoft Visual C++ for Python registry key. + """ + return r'DevDiv\VCForPython' + + @property + def microsoft_sdk(self): + """ + Microsoft SDK registry key. + """ + return 'Microsoft SDKs' + + @property + def windows_sdk(self): + """ + Microsoft Windows/Platform SDK registry key. + """ + return os.path.join(self.microsoft_sdk, 'Windows') + + @property + def netfx_sdk(self): + """ + Microsoft .NET Framework SDK registry key. + """ + return os.path.join(self.microsoft_sdk, 'NETFXSDK') + + @property + def windows_kits_roots(self): + """ + Microsoft Windows Kits Roots registry key. + """ + return r'Windows Kits\Installed Roots' + + def microsoft(self, key, x86=False): + """ + Return key in Microsoft software registry. + + Parameters + ---------- + key: str + Registry key path where look. + x86: str + Force x86 software registry. + + Return + ------ + str: value + """ + node64 = '' if self.pi.current_is_x86() or x86 else r'\Wow6432Node' + return os.path.join('Software', node64, 'Microsoft', key) + + def lookup(self, key, name): + """ + Look for values in registry in Microsoft software registry. + + Parameters + ---------- + key: str + Registry key path where look. + name: str + Value name to find. + + Return + ------ + str: value + """ + KEY_READ = winreg.KEY_READ + openkey = winreg.OpenKey + ms = self.microsoft + for hkey in self.HKEYS: + try: + bkey = openkey(hkey, ms(key), 0, KEY_READ) + except (OSError, IOError): + if not self.pi.current_is_x86(): + try: + bkey = openkey(hkey, ms(key, True), 0, KEY_READ) + except (OSError, IOError): + continue + else: + continue + try: + return winreg.QueryValueEx(bkey, name)[0] + except (OSError, IOError): + pass + + +class SystemInfo: + """ + Microsoft Windows and Visual Studio related system inormations. 
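For orientation: the PlatformInfo helpers above drive every per-architecture subfolder computed later in this module. A small sketch, using 'x86_amd64' as an illustrative cross-compilation arch spec:

    from setuptools.msvc import PlatformInfo

    pi = PlatformInfo('x86_amd64')        # x86 host toolchain, amd64 target
    print(pi.target_cpu)                  # 'amd64'
    print(pi.target_dir(x64=True))        # r'\x64'
    print(pi.cross_dir(forcex86=True))    # r'\x86_amd64'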
+ + Parameters + ---------- + registry_info: RegistryInfo + "RegistryInfo" instance. + vc_ver: float + Required Microsoft Visual C++ version. + """ + # Variables and properties in this class use originals CamelCase variables + # names from Microsoft source files for more easy comparaison. + WinDir = safe_env.get('WinDir', '') + ProgramFiles = safe_env.get('ProgramFiles', '') + ProgramFilesx86 = safe_env.get('ProgramFiles(x86)', ProgramFiles) + + def __init__(self, registry_info, vc_ver=None): + self.ri = registry_info + self.pi = self.ri.pi + if vc_ver: + self.vc_ver = vc_ver + else: + try: + self.vc_ver = self.find_available_vc_vers()[-1] + except IndexError: + err = 'No Microsoft Visual C++ version found' + raise distutils.errors.DistutilsPlatformError(err) + + def find_available_vc_vers(self): + """ + Find all available Microsoft Visual C++ versions. + """ + vckeys = (self.ri.vc, self.ri.vc_for_python) + vc_vers = [] + for hkey in self.ri.HKEYS: + for key in vckeys: + try: + bkey = winreg.OpenKey(hkey, key, 0, winreg.KEY_READ) + except (OSError, IOError): + continue + subkeys, values, _ = winreg.QueryInfoKey(bkey) + for i in range(values): + try: + ver = float(winreg.EnumValue(bkey, i)[0]) + if ver not in vc_vers: + vc_vers.append(ver) + except ValueError: + pass + for i in range(subkeys): + try: + ver = float(winreg.EnumKey(bkey, i)) + if ver not in vc_vers: + vc_vers.append(ver) + except ValueError: + pass + return sorted(vc_vers) + + @property + def VSInstallDir(self): + """ + Microsoft Visual Studio directory. + """ + # Default path + name = 'Microsoft Visual Studio %0.1f' % self.vc_ver + default = os.path.join(self.ProgramFilesx86, name) + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vs, '%0.1f' % self.vc_ver) or default + + @property + def VCInstallDir(self): + """ + Microsoft Visual C++ directory. + """ + # Default path + default = r'Microsoft Visual Studio %0.1f\VC' % self.vc_ver + guess_vc = os.path.join(self.ProgramFilesx86, default) + + # Try to get "VC++ for Python" path from registry as default path + reg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) + python_vc = self.ri.lookup(reg_path, 'installdir') + default_vc = os.path.join(python_vc, 'VC') if python_vc else guess_vc + + # Try to get path from registry, if fail use default path + path = self.ri.lookup(self.ri.vc, '%0.1f' % self.vc_ver) or default_vc + + if not os.path.isdir(path): + msg = 'Microsoft Visual C++ directory not found' + raise distutils.errors.DistutilsPlatformError(msg) + + return path + + @property + def WindowsSdkVersion(self): + """ + Microsoft Windows SDK versions. + """ + # Set Windows SDK versions for specified MSVC++ version + if self.vc_ver <= 9.0: + return ('7.0', '6.1', '6.0a') + elif self.vc_ver == 10.0: + return ('7.1', '7.0a') + elif self.vc_ver == 11.0: + return ('8.0', '8.0a') + elif self.vc_ver == 12.0: + return ('8.1', '8.1a') + elif self.vc_ver >= 14.0: + return ('10.0', '8.1') + + @property + def WindowsSdkDir(self): + """ + Microsoft Windows SDK directory. 
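A quick illustration of the version table encoded in the WindowsSdkVersion property above; passing an explicit vc_ver avoids any registry access, so this sketch should run even where no compiler is installed:

    from setuptools.msvc import PlatformInfo, RegistryInfo, SystemInfo

    si = SystemInfo(RegistryInfo(PlatformInfo('x86')), vc_ver=9.0)
    print(si.WindowsSdkVersion)   # ('7.0', '6.1', '6.0a')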
+ """ + sdkdir = '' + for ver in self.WindowsSdkVersion: + # Try to get it from registry + loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver) + sdkdir = self.ri.lookup(loc, 'installationfolder') + if sdkdir: + break + if not sdkdir or not os.path.isdir(sdkdir): + # Try to get "VC++ for Python" version from registry + path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) + install_base = self.ri.lookup(path, 'installdir') + if install_base: + sdkdir = os.path.join(install_base, 'WinSDK') + if not sdkdir or not os.path.isdir(sdkdir): + # If fail, use default new path + for ver in self.WindowsSdkVersion: + intver = ver[:ver.rfind('.')] + path = r'Microsoft SDKs\Windows Kits\%s' % (intver) + d = os.path.join(self.ProgramFiles, path) + if os.path.isdir(d): + sdkdir = d + if not sdkdir or not os.path.isdir(sdkdir): + # If fail, use default old path + for ver in self.WindowsSdkVersion: + path = r'Microsoft SDKs\Windows\v%s' % ver + d = os.path.join(self.ProgramFiles, path) + if os.path.isdir(d): + sdkdir = d + if not sdkdir: + # If fail, use Platform SDK + sdkdir = os.path.join(self.VCInstallDir, 'PlatformSDK') + return sdkdir + + @property + def WindowsSDKExecutablePath(self): + """ + Microsoft Windows SDK executable directory. + """ + # Find WinSDK NetFx Tools registry dir name + if self.vc_ver <= 11.0: + netfxver = 35 + arch = '' + else: + netfxver = 40 + hidex86 = True if self.vc_ver <= 12.0 else False + arch = self.pi.current_dir(x64=True, hidex86=hidex86) + fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-')) + + # liste all possibles registry paths + regpaths = [] + if self.vc_ver >= 14.0: + for ver in self.NetFxSdkVersion: + regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)] + + for ver in self.WindowsSdkVersion: + regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)] + + # Return installation folder from the more recent path + for path in regpaths: + execpath = self.ri.lookup(path, 'installationfolder') + if execpath: + break + return execpath + + @property + def FSharpInstallDir(self): + """ + Microsoft Visual F# directory. + """ + path = r'%0.1f\Setup\F#' % self.vc_ver + path = os.path.join(self.ri.visualstudio, path) + return self.ri.lookup(path, 'productdir') or '' + + @property + def UniversalCRTSdkDir(self): + """ + Microsoft Universal CRT SDK directory. + """ + # Set Kit Roots versions for specified MSVC++ version + if self.vc_ver >= 14.0: + vers = ('10', '81') + else: + vers = () + + # Find path of the more recent Kit + for ver in vers: + sdkdir = self.ri.lookup(self.ri.windows_kits_roots, + 'kitsroot%s' % ver) + if sdkdir: + break + return sdkdir or '' + + @property + def NetFxSdkVersion(self): + """ + Microsoft .NET Framework SDK versions. + """ + # Set FxSdk versions for specified MSVC++ version + if self.vc_ver >= 14.0: + return ('4.6.1', '4.6') + else: + return () + + @property + def NetFxSdkDir(self): + """ + Microsoft .NET Framework SDK directory. + """ + for ver in self.NetFxSdkVersion: + loc = os.path.join(self.ri.netfx_sdk, ver) + sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder') + if sdkdir: + break + return sdkdir or '' + + @property + def FrameworkDir32(self): + """ + Microsoft .NET Framework 32bit directory. + """ + # Default path + guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework') + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw + + @property + def FrameworkDir64(self): + """ + Microsoft .NET Framework 64bit directory. 
+ """ + # Default path + guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework64') + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw + + @property + def FrameworkVersion32(self): + """ + Microsoft .NET Framework 32bit versions. + """ + return self._find_dot_net_versions(32) + + @property + def FrameworkVersion64(self): + """ + Microsoft .NET Framework 64bit versions. + """ + return self._find_dot_net_versions(64) + + def _find_dot_net_versions(self, bits=32): + """ + Find Microsoft .NET Framework versions. + + Parameters + ---------- + bits: int + Platform number of bits: 32 or 64. + """ + # Find actual .NET version + ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits) or '' + + # Set .NET versions for specified MSVC++ version + if self.vc_ver >= 12.0: + frameworkver = (ver, 'v4.0') + elif self.vc_ver >= 10.0: + frameworkver = ('v4.0.30319' if ver.lower()[:2] != 'v4' else ver, + 'v3.5') + elif self.vc_ver == 9.0: + frameworkver = ('v3.5', 'v2.0.50727') + if self.vc_ver == 8.0: + frameworkver = ('v3.0', 'v2.0.50727') + return frameworkver + + +class EnvironmentInfo: + """ + Return environment variables for specified Microsoft Visual C++ version + and platform : Lib, Include, Path and libpath. + + This function is compatible with Microsoft Visual C++ 9.0 to 14.0. + + Script created by analysing Microsoft environment configuration files like + "vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ... + + Parameters + ---------- + arch: str + Target architecture. + vc_ver: float + Required Microsoft Visual C++ version. If not set, autodetect the last + version. + vc_min_ver: float + Minimum Microsoft Visual C++ version. + """ + # Variables and properties in this class use originals CamelCase variables + # names from Microsoft source files for more easy comparaison. + + def __init__(self, arch, vc_ver=None, vc_min_ver=None): + self.pi = PlatformInfo(arch) + self.ri = RegistryInfo(self.pi) + self.si = SystemInfo(self.ri, vc_ver) + + if vc_min_ver: + if self.vc_ver < vc_min_ver: + err = 'No suitable Microsoft Visual C++ version found' + raise distutils.errors.DistutilsPlatformError(err) + + @property + def vc_ver(self): + """ + Microsoft Visual C++ version. 
+ """ + return self.si.vc_ver + + @property + def VSTools(self): + """ + Microsoft Visual Studio Tools + """ + paths = [r'Common7\IDE', r'Common7\Tools'] + + if self.vc_ver >= 14.0: + arch_subdir = self.pi.current_dir(hidex86=True, x64=True) + paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow'] + paths += [r'Team Tools\Performance Tools'] + paths += [r'Team Tools\Performance Tools%s' % arch_subdir] + + return [os.path.join(self.si.VSInstallDir, path) for path in paths] + + @property + def VCIncludes(self): + """ + Microsoft Visual C++ & Microsoft Foundation Class Includes + """ + return [os.path.join(self.si.VCInstallDir, 'Include'), + os.path.join(self.si.VCInstallDir, r'ATLMFC\Include')] + + @property + def VCLibraries(self): + """ + Microsoft Visual C++ & Microsoft Foundation Class Libraries + """ + arch_subdir = self.pi.target_dir(hidex86=True) + paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir] + + if self.vc_ver >= 14.0: + paths += [r'Lib\store%s' % arch_subdir] + + return [os.path.join(self.si.VCInstallDir, path) for path in paths] + + @property + def VCStoreRefs(self): + """ + Microsoft Visual C++ store references Libraries + """ + if self.vc_ver < 14.0: + return [] + return [os.path.join(self.si.VCInstallDir, r'Lib\store\references')] + + @property + def VCTools(self): + """ + Microsoft Visual C++ Tools + """ + si = self.si + tools = [os.path.join(si.VCInstallDir, 'VCPackages')] + + forcex86 = True if self.vc_ver <= 10.0 else False + arch_subdir = self.pi.cross_dir(forcex86) + if arch_subdir: + tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)] + + if self.vc_ver >= 14.0: + path = 'Bin%s' % self.pi.current_dir(hidex86=True) + tools += [os.path.join(si.VCInstallDir, path)] + + else: + tools += [os.path.join(si.VCInstallDir, 'Bin')] + + return tools + + @property + def OSLibraries(self): + """ + Microsoft Windows SDK Libraries + """ + if self.vc_ver <= 10.0: + arch_subdir = self.pi.target_dir(hidex86=True, x64=True) + return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)] + + else: + arch_subdir = self.pi.target_dir(x64=True) + lib = os.path.join(self.si.WindowsSdkDir, 'lib') + libver = self._get_content_dirname(lib) + return [os.path.join(lib, '%sum%s' % (libver, arch_subdir))] + + @property + def OSIncludes(self): + """ + Microsoft Windows SDK Include + """ + include = os.path.join(self.si.WindowsSdkDir, 'include') + + if self.vc_ver <= 10.0: + return [include, os.path.join(include, 'gl')] + + else: + if self.vc_ver >= 14.0: + sdkver = self._get_content_dirname(include) + else: + sdkver = '' + return [os.path.join(include, '%sshared' % sdkver), + os.path.join(include, '%sum' % sdkver), + os.path.join(include, '%swinrt' % sdkver)] + + @property + def OSLibpath(self): + """ + Microsoft Windows SDK Libraries Paths + """ + ref = os.path.join(self.si.WindowsSdkDir, 'References') + libpath = [] + + if self.vc_ver <= 9.0: + libpath += self.OSLibraries + + if self.vc_ver >= 11.0: + libpath += [os.path.join(ref, r'CommonConfiguration\Neutral')] + + if self.vc_ver >= 14.0: + libpath += [ + ref, + os.path.join(self.si.WindowsSdkDir, 'UnionMetadata'), + os.path.join( + ref, + 'Windows.Foundation.UniversalApiContract', + '1.0.0.0', + ), + os.path.join( + ref, + 'Windows.Foundation.FoundationContract', + '1.0.0.0', + ), + os.path.join( + ref, + 'Windows.Networking.Connectivity.WwanContract', + '1.0.0.0', + ), + os.path.join( + self.si.WindowsSdkDir, + 'ExtensionSDKs', + 'Microsoft.VCLibs', + '%0.1f' % self.vc_ver, + 'References', + 
'CommonConfiguration', + 'neutral', + ), + ] + return libpath + + @property + def SdkTools(self): + """ + Microsoft Windows SDK Tools + """ + bin_dir = 'Bin' if self.vc_ver <= 11.0 else r'Bin\x86' + tools = [os.path.join(self.si.WindowsSdkDir, bin_dir)] + + if not self.pi.current_is_x86(): + arch_subdir = self.pi.current_dir(x64=True) + path = 'Bin%s' % arch_subdir + tools += [os.path.join(self.si.WindowsSdkDir, path)] + + if self.vc_ver == 10.0 or self.vc_ver == 11.0: + if self.pi.target_is_x86(): + arch_subdir = '' + else: + arch_subdir = self.pi.current_dir(hidex86=True, x64=True) + path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir + tools += [os.path.join(self.si.WindowsSdkDir, path)] + + if self.si.WindowsSDKExecutablePath: + tools += [self.si.WindowsSDKExecutablePath] + + return tools + + @property + def SdkSetup(self): + """ + Microsoft Windows SDK Setup + """ + if self.vc_ver > 9.0: + return [] + + return [os.path.join(self.si.WindowsSdkDir, 'Setup')] + + @property + def FxTools(self): + """ + Microsoft .NET Framework Tools + """ + pi = self.pi + si = self.si + + if self.vc_ver <= 10.0: + include32 = True + include64 = not pi.target_is_x86() and not pi.current_is_x86() + else: + include32 = pi.target_is_x86() or pi.current_is_x86() + include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64' + + tools = [] + if include32: + tools += [os.path.join(si.FrameworkDir32, ver) + for ver in si.FrameworkVersion32] + if include64: + tools += [os.path.join(si.FrameworkDir64, ver) + for ver in si.FrameworkVersion64] + return tools + + @property + def NetFxSDKLibraries(self): + """ + Microsoft .Net Framework SDK Libraries + """ + if self.vc_ver < 14.0 or not self.si.NetFxSdkDir: + return [] + + arch_subdir = self.pi.target_dir(x64=True) + return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)] + + @property + def NetFxSDKIncludes(self): + """ + Microsoft .Net Framework SDK Includes + """ + if self.vc_ver < 14.0 or not self.si.NetFxSdkDir: + return [] + + return [os.path.join(self.si.NetFxSdkDir, r'include\um')] + + @property + def VsTDb(self): + """ + Microsoft Visual Studio Team System Database + """ + return [os.path.join(self.si.VSInstallDir, r'VSTSDB\Deploy')] + + @property + def MSBuild(self): + """ + Microsoft Build Engine + """ + if self.vc_ver < 12.0: + return [] + + arch_subdir = self.pi.current_dir(hidex86=True) + path = r'MSBuild\%0.1f\bin%s' % (self.vc_ver, arch_subdir) + return [os.path.join(self.si.ProgramFilesx86, path)] + + @property + def HTMLHelpWorkshop(self): + """ + Microsoft HTML Help Workshop + """ + if self.vc_ver < 11.0: + return [] + + return [os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')] + + @property + def UCRTLibraries(self): + """ + Microsoft Universal CRT Libraries + """ + if self.vc_ver < 14.0: + return [] + + arch_subdir = self.pi.target_dir(x64=True) + lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib') + ucrtver = self._get_content_dirname(lib) + return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))] + + @property + def UCRTIncludes(self): + """ + Microsoft Universal CRT Include + """ + if self.vc_ver < 14.0: + return [] + + include = os.path.join(self.si.UniversalCRTSdkDir, 'include') + ucrtver = self._get_content_dirname(include) + return [os.path.join(include, '%sucrt' % ucrtver)] + + @property + def FSharp(self): + """ + Microsoft Visual F# + """ + if self.vc_ver < 11.0 and self.vc_ver > 12.0: + return [] + + return self.si.FSharpInstallDir + + @property + def VCRuntimeRedist(self): + """ + Microsoft Visual C++ 
runtime redistribuable dll + """ + arch_subdir = self.pi.target_dir(x64=True) + vcruntime = 'redist%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll' + vcruntime = vcruntime % (arch_subdir, self.vc_ver, self.vc_ver) + return os.path.join(self.si.VCInstallDir, vcruntime) + + def return_env(self, exists=True): + """ + Return environment dict. + + Parameters + ---------- + exists: bool + It True, only return existing paths. + """ + env = dict( + include=self._build_paths('include', + [self.VCIncludes, + self.OSIncludes, + self.UCRTIncludes, + self.NetFxSDKIncludes], + exists), + lib=self._build_paths('lib', + [self.VCLibraries, + self.OSLibraries, + self.FxTools, + self.UCRTLibraries, + self.NetFxSDKLibraries], + exists), + libpath=self._build_paths('libpath', + [self.VCLibraries, + self.FxTools, + self.VCStoreRefs, + self.OSLibpath], + exists), + path=self._build_paths('path', + [self.VCTools, + self.VSTools, + self.VsTDb, + self.SdkTools, + self.SdkSetup, + self.FxTools, + self.MSBuild, + self.HTMLHelpWorkshop, + self.FSharp], + exists), + ) + if self.vc_ver >= 14 and os.path.isfile(self.VCRuntimeRedist): + env['py_vcruntime_redist'] = self.VCRuntimeRedist + return env + + def _build_paths(self, name, spec_path_lists, exists): + """ + Given an environment variable name and specified paths, + return a pathsep-separated string of paths containing + unique, extant, directories from those paths and from + the environment variable. Raise an error if no paths + are resolved. + """ + # flatten spec_path_lists + spec_paths = itertools.chain.from_iterable(spec_path_lists) + env_paths = safe_env.get(name, '').split(os.pathsep) + paths = itertools.chain(spec_paths, env_paths) + extant_paths = list(filter(os.path.isdir, paths)) if exists else paths + if not extant_paths: + msg = "%s environment variable is empty" % name.upper() + raise distutils.errors.DistutilsPlatformError(msg) + unique_paths = self._unique_everseen(extant_paths) + return os.pathsep.join(unique_paths) + + # from Python docs + def _unique_everseen(self, iterable, key=None): + """ + List unique elements, preserving order. + Remember all elements ever seen. + + _unique_everseen('AAAABBBCCDAABBB') --> A B C D + + _unique_everseen('ABBCcAD', str.lower) --> A B C D + """ + seen = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + + def _get_content_dirname(self, path): + """ + Return name of the first dir in path or '' if no dir found. + + Parameters + ---------- + path: str + Path where search dir. + + Return + ------ + foldername: str + "name\" or "" + """ + try: + name = os.listdir(path) + if name: + return '%s\\' % name[0] + return '' + except (OSError, IOError): + return '' diff --git a/setuptools/msvc9_support.py b/setuptools/msvc9_support.py deleted file mode 100644 index 9d869580..00000000 --- a/setuptools/msvc9_support.py +++ /dev/null @@ -1,63 +0,0 @@ -try: - import distutils.msvc9compiler -except Exception: - pass - -unpatched = dict() - -def patch_for_specialized_compiler(): - """ - Patch functions in distutils.msvc9compiler to use the standalone compiler - build for Python (Windows only). Fall back to original behavior when the - standalone compiler is not available. 
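Worth noting for downstream callers: the module deleted below exposed patch_for_specialized_compiler(); after this change the equivalent entry point shipped in the patch is the monkey helper, e.g.:

    from setuptools.monkey import patch_for_msvc_specialized_compiler

    # Redirects distutils' MSVC lookups to the standalone compilers;
    # returns immediately on non-Windows platforms.
    patch_for_msvc_specialized_compiler()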
- """ - if 'distutils' not in globals(): - # The module isn't available to be patched - return - - if unpatched: - # Already patched - return - - unpatched.update(vars(distutils.msvc9compiler)) - - distutils.msvc9compiler.find_vcvarsall = find_vcvarsall - distutils.msvc9compiler.query_vcvarsall = query_vcvarsall - -def find_vcvarsall(version): - Reg = distutils.msvc9compiler.Reg - VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' - key = VC_BASE % ('', version) - try: - # Per-user installs register the compiler path here - productdir = Reg.get_value(key, "installdir") - except KeyError: - try: - # All-user installs on a 64-bit system register here - key = VC_BASE % ('Wow6432Node\\', version) - productdir = Reg.get_value(key, "installdir") - except KeyError: - productdir = None - - if productdir: - import os - vcvarsall = os.path.join(productdir, "vcvarsall.bat") - if os.path.isfile(vcvarsall): - return vcvarsall - - return unpatched['find_vcvarsall'](version) - -def query_vcvarsall(version, *args, **kwargs): - try: - return unpatched['query_vcvarsall'](version, *args, **kwargs) - except distutils.errors.DistutilsPlatformError as exc: - if exc and "vcvarsall.bat" in exc.args[0]: - message = 'Microsoft Visual C++ %0.1f is required (%s).' % (version, exc.args[0]) - if int(version) == 9: - # This redirection link is maintained by Microsoft. - # Contact vspython@microsoft.com if it needs updating. - raise distutils.errors.DistutilsPlatformError( - message + ' Get it from http://aka.ms/vcpython27' - ) - raise distutils.errors.DistutilsPlatformError(message) - raise diff --git a/setuptools/package_index.py b/setuptools/package_index.py index e87504db..3e8d6818 100755 --- a/setuptools/package_index.py +++ b/setuptools/package_index.py @@ -37,7 +37,7 @@ PYPI_MD5 = re.compile( '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)' 'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)' ) -URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match +URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split() __all__ = [ @@ -52,6 +52,15 @@ _tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}" user_agent = _tmpl.format(py_major=sys.version[:3], **globals()) +def parse_requirement_arg(spec): + try: + return Requirement.parse(spec) + except ValueError: + raise DistutilsError( + "Not a URL, existing file, or requirement spec: %r" % (spec,) + ) + + def parse_bdist_wininst(name): """Return (base,pyversion) or (None,None) for possible .exe name""" @@ -62,41 +71,45 @@ def parse_bdist_wininst(name): if lower.endswith('.win32.exe'): base = name[:-10] plat = 'win32' - elif lower.startswith('.win32-py',-16): + elif lower.startswith('.win32-py', -16): py_ver = name[-7:-4] base = name[:-16] plat = 'win32' elif lower.endswith('.win-amd64.exe'): base = name[:-14] plat = 'win-amd64' - elif lower.startswith('.win-amd64-py',-20): + elif lower.startswith('.win-amd64-py', -20): py_ver = name[-7:-4] base = name[:-20] plat = 'win-amd64' - return base,py_ver,plat + return base, py_ver, plat def egg_info_for_url(url): parts = urllib.parse.urlparse(url) scheme, server, path, parameters, query, fragment = parts base = urllib.parse.unquote(path.split('/')[-1]) - if server=='sourceforge.net' and base=='download': # XXX Yuck + if server == 'sourceforge.net' and base == 'download': # XXX Yuck base = urllib.parse.unquote(path.split('/')[-2]) - if '#' in base: base, fragment = base.split('#',1) - return base,fragment + if '#' in base: 
+ base, fragment = base.split('#', 1) + return base, fragment + def distros_for_url(url, metadata=None): """Yield egg or source distribution objects that might be found at a URL""" base, fragment = egg_info_for_url(url) - for dist in distros_for_location(url, base, metadata): yield dist + for dist in distros_for_location(url, base, metadata): + yield dist if fragment: match = EGG_FRAGMENT.match(fragment) if match: for dist in interpret_distro_name( - url, match.group(1), metadata, precedence = CHECKOUT_DIST + url, match.group(1), metadata, precedence=CHECKOUT_DIST ): yield dist + def distros_for_location(location, basename, metadata=None): """Yield egg or source distribution objects based on basename""" if basename.endswith('.egg.zip'): @@ -118,6 +131,7 @@ def distros_for_location(location, basename, metadata=None): return interpret_distro_name(location, basename, metadata) return [] # no extension matched + def distros_for_filename(filename, metadata=None): """Yield possible egg or source distribution objects based on a filename""" return distros_for_location( @@ -152,13 +166,14 @@ def interpret_distro_name( # it is a bdist_dumb, not an sdist -- bail out return - for p in range(1,len(parts)+1): + for p in range(1, len(parts) + 1): yield Distribution( location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), - py_version=py_version, precedence = precedence, - platform = platform + py_version=py_version, precedence=precedence, + platform=platform ) + # From Python 2.7 docs def unique_everseen(iterable, key=None): "List unique elements, preserving order. Remember all elements ever seen." @@ -177,6 +192,7 @@ def unique_everseen(iterable, key=None): seen_add(k) yield element + def unique_values(func): """ Wrap a function returning an iterable such that the resulting iterable @@ -187,9 +203,11 @@ def unique_values(func): return unique_everseen(func(*args, **kwargs)) return wrapper + REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) # this line is here to fix emacs' cruddy broken syntax highlighting + @unique_values def find_external_links(url, page): """Find rel="homepage" and rel="download" links in `page`, yielding URLs""" @@ -203,8 +221,8 @@ def find_external_links(url, page): for tag in ("<th>Home Page", "<th>Download URL"): pos = page.find(tag) - if pos!=-1: - match = HREF.search(page,pos) + if pos != -1: + match = HREF.search(page, pos) if match: yield urllib.parse.urljoin(url, htmldecode(match.group(1))) @@ -213,6 +231,7 @@ class ContentChecker(object): """ A null content checker that defines the interface for checking content """ + def feed(self, block): """ Feed a block of data to the hash. 
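A short usage sketch of the helpers reformatted above; the URL is purely illustrative:

    from setuptools.package_index import egg_info_for_url, distros_for_url

    url = 'https://example.com/dist/Foo-1.0.tar.gz#egg=Foo-1.0'
    print(egg_info_for_url(url))   # ('Foo-1.0.tar.gz', 'egg=Foo-1.0')
    # Candidate distributions derived from the basename and the #egg fragment.
    print([d.project_name for d in distros_for_url(url)])   # e.g. ['Foo', 'Foo-1.0', ...]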
@@ -232,6 +251,7 @@ class ContentChecker(object): """ return + class HashChecker(ContentChecker): pattern = re.compile( r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)=' @@ -272,16 +292,22 @@ class PackageIndex(Environment): self, index_url="https://pypi.python.org/simple", hosts=('*',), ca_bundle=None, verify_ssl=True, *args, **kw ): - Environment.__init__(self,*args,**kw) - self.index_url = index_url + "/"[:not index_url.endswith('/')] + Environment.__init__(self, *args, **kw) + self.index_url = index_url + "/" [:not index_url.endswith('/')] self.scanned_urls = {} self.fetched_urls = {} self.package_pages = {} - self.allows = re.compile('|'.join(map(translate,hosts))).match + self.allows = re.compile('|'.join(map(translate, hosts))).match self.to_scan = [] - if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()): + use_ssl = ( + verify_ssl + and ssl_support.is_available + and (ca_bundle or ssl_support.find_ca_bundle()) + ) + if use_ssl: self.opener = ssl_support.opener_for(ca_bundle) - else: self.opener = urllib.request.urlopen + else: + self.opener = urllib.request.urlopen def process_url(self, url, retrieve=False): """Evaluate a URL as a possible download, and maybe retrieve it""" @@ -308,8 +334,10 @@ class PackageIndex(Environment): self.info("Reading %s", url) self.fetched_urls[url] = True # prevent multiple fetch attempts - f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url) - if f is None: return + tmpl = "Download error on %s: %%s -- Some packages may not be found!" + f = self.open_url(url, tmpl % url) + if f is None: + return self.fetched_urls[f.url] = True if 'html' not in f.headers.get('content-type', '').lower(): f.close() # not html, we can't process it @@ -317,7 +345,7 @@ class PackageIndex(Environment): base = f.url # handle redirects page = f.read() - if not isinstance(page, str): # We are in Python 3 and got bytes. We want str. + if not isinstance(page, str): # We are in Python 3 and got bytes. We want str. 
if isinstance(f, urllib.error.HTTPError): # Errors have no charset, assume latin1: charset = 'latin-1' @@ -328,7 +356,7 @@ class PackageIndex(Environment): for match in HREF.finditer(page): link = urllib.parse.urljoin(base, htmldecode(match.group(1))) self.process_url(link) - if url.startswith(self.index_url) and getattr(f,'code',None)!=404: + if url.startswith(self.index_url) and getattr(f, 'code', None) != 404: page = self.process_index(url, page) def process_filename(self, fn, nested=False): @@ -340,7 +368,7 @@ class PackageIndex(Environment): if os.path.isdir(fn) and not nested: path = os.path.realpath(fn) for item in os.listdir(path): - self.process_filename(os.path.join(path,item), True) + self.process_filename(os.path.join(path, item), True) dists = distros_for_filename(fn) if dists: @@ -349,7 +377,8 @@ class PackageIndex(Environment): def url_ok(self, url, fatal=False): s = URL_SCHEME(url) - if (s and s.group(1).lower()=='file') or self.allows(urllib.parse.urlparse(url)[1]): + is_file = s and s.group(1).lower() == 'file' + if is_file or self.allows(urllib.parse.urlparse(url)[1]): return True msg = ("\nNote: Bypassing %s (disallowed host; see " "http://bit.ly/1dg9ijs for details).\n") @@ -384,7 +413,7 @@ class PackageIndex(Environment): dist.precedence = SOURCE_DIST self.add(dist) - def process_index(self,url,page): + def process_index(self, url, page): """Process the contents of a PyPI page""" def scan(link): # Process a URL to see if it's for a package page @@ -392,11 +421,11 @@ class PackageIndex(Environment): parts = list(map( urllib.parse.unquote, link[len(self.index_url):].split('/') )) - if len(parts)==2 and '#' not in parts[1]: + if len(parts) == 2 and '#' not in parts[1]: # it's a package page, sanitize and index it pkg = safe_name(parts[0]) ver = safe_version(parts[1]) - self.package_pages.setdefault(pkg.lower(),{})[link] = True + self.package_pages.setdefault(pkg.lower(), {})[link] = True return to_filename(pkg), to_filename(ver) return None, None @@ -415,13 +444,13 @@ class PackageIndex(Environment): base, frag = egg_info_for_url(new_url) if base.endswith('.py') and not frag: if ver: - new_url+='#egg=%s-%s' % (pkg,ver) + new_url += '#egg=%s-%s' % (pkg, ver) else: self.need_version_info(url) self.scan_url(new_url) return PYPI_MD5.sub( - lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page + lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page ) else: return "" # no sense double-scanning non-package pages @@ -434,24 +463,25 @@ class PackageIndex(Environment): def scan_all(self, msg=None, *args): if self.index_url not in self.fetched_urls: - if msg: self.warn(msg,*args) + if msg: + self.warn(msg, *args) self.info( "Scanning index of all packages (this may take a while)" ) self.scan_url(self.index_url) def find_packages(self, requirement): - self.scan_url(self.index_url + requirement.unsafe_name+'/') + self.scan_url(self.index_url + requirement.unsafe_name + '/') if not self.package_pages.get(requirement.key): # Fall back to safe version of the name - self.scan_url(self.index_url + requirement.project_name+'/') + self.scan_url(self.index_url + requirement.project_name + '/') if not self.package_pages.get(requirement.key): # We couldn't find the target package, so search the index page too self.not_found_in_index(requirement) - for url in list(self.package_pages.get(requirement.key,())): + for url in list(self.package_pages.get(requirement.key, ())): # scan each page that might be related to the desired package self.scan_url(url) @@ -462,7 +492,7 @@ class 
PackageIndex(Environment): if dist in requirement: return dist self.debug("%s does not match %s", requirement, dist) - return super(PackageIndex, self).obtain(requirement,installer) + return super(PackageIndex, self).obtain(requirement, installer) def check_hash(self, checker, filename, tfp): """ @@ -527,27 +557,21 @@ class PackageIndex(Environment): of `tmpdir`, and the local filename is returned. Various errors may be raised if a problem occurs during downloading. """ - if not isinstance(spec,Requirement): + if not isinstance(spec, Requirement): scheme = URL_SCHEME(spec) if scheme: # It's a url, download it to tmpdir found = self._download_url(scheme.group(1), spec, tmpdir) base, fragment = egg_info_for_url(spec) if base.endswith('.py'): - found = self.gen_setup(found,fragment,tmpdir) + found = self.gen_setup(found, fragment, tmpdir) return found elif os.path.exists(spec): # Existing file or directory, just return it return spec else: - try: - spec = Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % - (spec,) - ) - return getattr(self.fetch_distribution(spec, tmpdir),'location',None) + spec = parse_requirement_arg(spec) + return getattr(self.fetch_distribution(spec, tmpdir), 'location', None) def fetch_distribution( self, requirement, tmpdir, force_scan=False, source=False, @@ -581,22 +605,24 @@ class PackageIndex(Environment): for dist in env[req.key]: - if dist.precedence==DEVELOP_DIST and not develop_ok: + if dist.precedence == DEVELOP_DIST and not develop_ok: if dist not in skipped: - self.warn("Skipping development or system egg: %s",dist) + self.warn("Skipping development or system egg: %s", dist) skipped[dist] = 1 continue - if dist in req and (dist.precedence<=SOURCE_DIST or not source): - return dist + if dist in req and (dist.precedence <= SOURCE_DIST or not source): + dist.download_location = self.download(dist.location, tmpdir) + if os.path.exists(dist.download_location): + return dist if force_scan: self.prescan() self.find_packages(requirement) dist = find(requirement) - if local_index is not None: - dist = dist or find(requirement, local_index) + if not dist and local_index is not None: + dist = find(requirement, local_index) if dist is None: if self.to_scan is not None: @@ -609,13 +635,13 @@ class PackageIndex(Environment): if dist is None: self.warn( - "No local packages or download links found for %s%s", + "No local packages or working download links found for %s%s", (source and "a source distribution of " or ""), requirement, ) else: self.info("Best match: %s", dist) - return dist.clone(location=self.download(dist.location, tmpdir)) + return dist.clone(location=dist.download_location) def fetch(self, requirement, tmpdir, force_scan=False, source=False): """Obtain a file suitable for fulfilling `requirement` @@ -625,7 +651,7 @@ class PackageIndex(Environment): ``location`` of the downloaded distribution instead of a distribution object. """ - dist = self.fetch_distribution(requirement,tmpdir,force_scan,source) + dist = self.fetch_distribution(requirement, tmpdir, force_scan, source) if dist is not None: return dist.location return None @@ -637,7 +663,7 @@ class PackageIndex(Environment): interpret_distro_name(filename, match.group(1), None) if d.version ] or [] - if len(dists)==1: # unambiguous ``#egg`` fragment + if len(dists) == 1: # unambiguous ``#egg`` fragment basename = os.path.basename(filename) # Make sure the file has been downloaded to the temp dir. 
@@ -646,7 +672,7 @@ class PackageIndex(Environment): from setuptools.command.easy_install import samefile if not samefile(filename, dst): shutil.copy2(filename, dst) - filename=dst + filename = dst with open(os.path.join(tmpdir, 'setup.py'), 'w') as file: file.write( @@ -663,7 +689,7 @@ class PackageIndex(Environment): raise DistutilsError( "Can't unambiguously interpret project/version identifier %r; " "any dashes in the name or version should be escaped using " - "underscores. %r" % (fragment,dists) + "underscores. %r" % (fragment, dists) ) else: raise DistutilsError( @@ -672,6 +698,7 @@ class PackageIndex(Environment): ) dl_blocksize = 8192 + def _download_to(self, url, filename): self.info("Downloading %s", url) # Download the file @@ -681,7 +708,7 @@ class PackageIndex(Environment): fp = self.open_url(strip_fragment(url)) if isinstance(fp, urllib.error.HTTPError): raise DistutilsError( - "Can't download %s: %s %s" % (url, fp.code,fp.msg) + "Can't download %s: %s %s" % (url, fp.code, fp.msg) ) headers = fp.info() blocknum = 0 @@ -692,7 +719,7 @@ class PackageIndex(Environment): sizes = get_all_headers(headers, 'Content-Length') size = max(map(int, sizes)) self.reporthook(url, filename, blocknum, bs, size) - with open(filename,'wb') as tfp: + with open(filename, 'wb') as tfp: while True: block = fp.read(bs) if block: @@ -705,7 +732,8 @@ class PackageIndex(Environment): self.check_hash(checker, filename, tfp) return headers finally: - if fp: fp.close() + if fp: + fp.close() def reporthook(self, url, filename, blocknum, blksize, size): pass # no-op @@ -751,24 +779,24 @@ class PackageIndex(Environment): name, fragment = egg_info_for_url(url) if name: while '..' in name: - name = name.replace('..','.').replace('\\','_') + name = name.replace('..', '.').replace('\\', '_') else: name = "__downloaded__" # default if URL has no path contents if name.endswith('.egg.zip'): name = name[:-4] # strip the extra .zip before download - filename = os.path.join(tmpdir,name) + filename = os.path.join(tmpdir, name) # Download the file # - if scheme=='svn' or scheme.startswith('svn+'): + if scheme == 'svn' or scheme.startswith('svn+'): return self._download_svn(url, filename) - elif scheme=='git' or scheme.startswith('git+'): + elif scheme == 'git' or scheme.startswith('git+'): return self._download_git(url, filename) elif scheme.startswith('hg+'): return self._download_hg(url, filename) - elif scheme=='file': + elif scheme == 'file': return urllib.request.url2pathname(urllib.parse.urlparse(url)[2]) else: self.url_ok(url, True) # raises error if not allowed @@ -779,7 +807,7 @@ class PackageIndex(Environment): def _attempt_download(self, url, filename): headers = self._download_to(url, filename) - if 'html' in headers.get('content-type','').lower(): + if 'html' in headers.get('content-type', '').lower(): return self._download_html(url, headers, filename) else: return filename @@ -797,22 +825,22 @@ class PackageIndex(Environment): break # not an index page file.close() os.unlink(filename) - raise DistutilsError("Unexpected HTML page found at "+url) + raise DistutilsError("Unexpected HTML page found at " + url) def _download_svn(self, url, filename): - url = url.split('#',1)[0] # remove any fragment for svn's sake + url = url.split('#', 1)[0] # remove any fragment for svn's sake creds = '' if url.lower().startswith('svn:') and '@' in url: scheme, netloc, path, p, q, f = urllib.parse.urlparse(url) if not netloc and path.startswith('//') and '/' in path[2:]: - netloc, path = path[2:].split('/',1) + netloc, path 
= path[2:].split('/', 1) auth, host = splituser(netloc) if auth: if ':' in auth: - user, pw = auth.split(':',1) + user, pw = auth.split(':', 1) creds = " --username=%s --password=%s" % (user, pw) else: - creds = " --username="+auth + creds = " --username=" + auth netloc = host parts = scheme, netloc, url, p, q, f url = urllib.parse.urlunparse(parts) @@ -827,7 +855,7 @@ class PackageIndex(Environment): scheme = scheme.split('+', 1)[-1] # Some fragment identification fails - path = path.split('#',1)[0] + path = path.split('#', 1)[0] rev = None if '@' in path: @@ -839,7 +867,7 @@ class PackageIndex(Environment): return url, rev def _download_git(self, url, filename): - filename = filename.split('#',1)[0] + filename = filename.split('#', 1)[0] url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) self.info("Doing git clone from %s to %s", url, filename) @@ -855,7 +883,7 @@ class PackageIndex(Environment): return filename def _download_hg(self, url, filename): - filename = filename.split('#',1)[0] + filename = filename.split('#', 1)[0] url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) self.info("Doing hg clone from %s to %s", url, filename) @@ -879,16 +907,20 @@ class PackageIndex(Environment): def warn(self, msg, *args): log.warn(msg, *args) + # This pattern matches a character entity reference (a decimal numeric # references, a hexadecimal numeric reference, or a named reference). entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub + def uchr(c): if not isinstance(c, int): return c - if c>255: return six.unichr(c) + if c > 255: + return six.unichr(c) return chr(c) + def decode_entity(match): what = match.group(1) if what.startswith('#x'): @@ -899,10 +931,12 @@ def decode_entity(match): what = six.moves.html_entities.name2codepoint.get(what, match.group(0)) return uchr(what) + def htmldecode(text): """Decode HTML entities in the given text.""" return entity_sub(decode_entity, text) + def socket_timeout(timeout=15): def _socket_timeout(func): def _socket_timeout(*args, **kwargs): @@ -915,6 +949,7 @@ def socket_timeout(timeout=15): return _socket_timeout return _socket_timeout + def _encode_auth(auth): """ A function compatible with Python 2.3-3.3 that will encode @@ -935,12 +970,14 @@ def _encode_auth(auth): # convert back to a string encoded = encoded_bytes.decode() # strip the trailing carriage return - return encoded.replace('\n','') + return encoded.replace('\n', '') + class Credential(object): """ A username/password pair. Use like a namedtuple. 
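Tiny usage sketch for the Credential helper introduced above; the values are placeholders:

    from setuptools.package_index import Credential

    cred = Credential('alice', 's3cret')
    print(cred.username, cred.password)
    print(str(cred))   # 'alice:s3cret'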
""" + def __init__(self, username, password): self.username = username self.password = password @@ -952,6 +989,7 @@ class Credential(object): def __str__(self): return '%(username)s:%(password)s' % vars(self) + class PyPIConfig(configparser.RawConfigParser): def __init__(self): @@ -1011,7 +1049,7 @@ def open_with_auth(url, opener=urllib.request.urlopen): if cred: auth = str(cred) info = cred.username, url - log.info('Authenticating as %s for %s (from .pypirc)' % info) + log.info('Authenticating as %s for %s (from .pypirc)', *info) if auth: auth = "Basic " + _encode_auth(auth) @@ -1029,12 +1067,13 @@ def open_with_auth(url, opener=urllib.request.urlopen): # Put authentication info back into request URL if same host, # so that links found on the page will work s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url) - if s2==scheme and h2==host: + if s2 == scheme and h2 == host: parts = s2, netloc, path2, param2, query2, frag2 fp.url = urllib.parse.urlunparse(parts) return fp + # adding a timeout to avoid freezing package_index open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth) @@ -1042,6 +1081,7 @@ open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth) def fix_sf_url(url): return url # backward compatibility + def local_open(url): """Read a local path, with special support for directories""" scheme, server, path, param, query, frag = urllib.parse.urlparse(url) diff --git a/setuptools/py26compat.py b/setuptools/py26compat.py index 40cbb88e..5778cdf1 100644 --- a/setuptools/py26compat.py +++ b/setuptools/py26compat.py @@ -9,6 +9,7 @@ try: except ImportError: from urllib import splittag + def strip_fragment(url): """ In `Python 8280 <http://bugs.python.org/issue8280>`_, Python 2.7 and @@ -18,5 +19,13 @@ def strip_fragment(url): url, fragment = splittag(url) return url -if sys.version_info >= (2,7): + +if sys.version_info >= (2, 7): strip_fragment = lambda x: x + + +try: + from importlib import import_module +except ImportError: + def import_module(module_name): + return __import__(module_name, fromlist=['__name__']) diff --git a/setuptools/py27compat.py b/setuptools/py27compat.py index 702f7d65..57eb150b 100644 --- a/setuptools/py27compat.py +++ b/setuptools/py27compat.py @@ -4,12 +4,14 @@ Compatibility Support for Python 2.7 and earlier import sys + def get_all_headers(message, key): """ Given an HTTPMessage, return all headers matching a given key. """ return message.get_all(key) + if sys.version_info < (3,): def get_all_headers(message, key): return message.getheaders(key) diff --git a/setuptools/py31compat.py b/setuptools/py31compat.py index 8fe6dd9d..1e959e28 100644 --- a/setuptools/py31compat.py +++ b/setuptools/py31compat.py @@ -8,10 +8,11 @@ try: from sysconfig import get_config_vars, get_path except ImportError: from distutils.sysconfig import get_config_vars, get_python_lib + def get_path(name): if name not in ('platlib', 'purelib'): raise ValueError("Name must be purelib or platlib") - return get_python_lib(name=='platlib') + return get_python_lib(name == 'platlib') try: # Python >=3.2 @@ -19,14 +20,16 @@ try: except ImportError: import shutil import tempfile + class TemporaryDirectory(object): """ Very simple temporary directory context manager. Will try to delete afterward, but will also ignore OS and similar errors on deletion. 
""" + def __init__(self): - self.name = None # Handle mkdtemp raising an exception + self.name = None # Handle mkdtemp raising an exception self.name = tempfile.mkdtemp() def __enter__(self): @@ -35,7 +38,7 @@ except ImportError: def __exit__(self, exctype, excvalue, exctrace): try: shutil.rmtree(self.name, True) - except OSError: #removal errors are not the only possible + except OSError: # removal errors are not the only possible pass self.name = None diff --git a/setuptools/sandbox.py b/setuptools/sandbox.py index 23e296b1..2babb636 100755 --- a/setuptools/sandbox.py +++ b/setuptools/sandbox.py @@ -29,6 +29,7 @@ __all__ = [ "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", ] + def _execfile(filename, globals, locals=None): """ Python 3 implementation of execfile. @@ -117,6 +118,7 @@ class ExceptionSaver: A Context Manager that will save an exception, serialized, and restore it later. """ + def __enter__(self): return self @@ -232,11 +234,12 @@ def run_setup(setup_script, args): setup_dir = os.path.abspath(os.path.dirname(setup_script)) with setup_context(setup_dir): try: - sys.argv[:] = [setup_script]+list(args) + sys.argv[:] = [setup_script] + list(args) sys.path.insert(0, setup_dir) # reset to include setup dir, w/clean callback list working_set.__init__() - working_set.callbacks.append(lambda dist:dist.activate()) + working_set.callbacks.append(lambda dist: dist.activate()) + def runner(): ns = dict(__file__=setup_script, __name__='__main__') _execfile(setup_script, ns) @@ -255,12 +258,12 @@ class AbstractSandbox: def __init__(self): self._attrs = [ name for name in dir(_os) - if not name.startswith('_') and hasattr(self,name) + if not name.startswith('_') and hasattr(self, name) ] def _copy(self, source): for name in self._attrs: - setattr(os, name, getattr(source,name)) + setattr(os, name, getattr(source, name)) def run(self, func): """Run 'func' under os sandboxing""" @@ -279,22 +282,25 @@ class AbstractSandbox: self._copy(_os) def _mk_dual_path_wrapper(name): - original = getattr(_os,name) - def wrap(self,src,dst,*args,**kw): + original = getattr(_os, name) + + def wrap(self, src, dst, *args, **kw): if self._active: - src,dst = self._remap_pair(name,src,dst,*args,**kw) - return original(src,dst,*args,**kw) + src, dst = self._remap_pair(name, src, dst, *args, **kw) + return original(src, dst, *args, **kw) return wrap for name in ["rename", "link", "symlink"]: - if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name) + if hasattr(_os, name): + locals()[name] = _mk_dual_path_wrapper(name) def _mk_single_path_wrapper(name, original=None): - original = original or getattr(_os,name) - def wrap(self,path,*args,**kw): + original = original or getattr(_os, name) + + def wrap(self, path, *args, **kw): if self._active: - path = self._remap_input(name,path,*args,**kw) - return original(path,*args,**kw) + path = self._remap_input(name, path, *args, **kw) + return original(path, *args, **kw) return wrap if _file: @@ -305,49 +311,54 @@ class AbstractSandbox: "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", "startfile", "mkfifo", "mknod", "pathconf", "access" ]: - if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name) + if hasattr(_os, name): + locals()[name] = _mk_single_path_wrapper(name) def _mk_single_with_return(name): - original = getattr(_os,name) - def wrap(self,path,*args,**kw): + original = getattr(_os, name) + + def wrap(self, path, *args, **kw): if self._active: - path = self._remap_input(name,path,*args,**kw) - return 
self._remap_output(name, original(path,*args,**kw)) - return original(path,*args,**kw) + path = self._remap_input(name, path, *args, **kw) + return self._remap_output(name, original(path, *args, **kw)) + return original(path, *args, **kw) return wrap for name in ['readlink', 'tempnam']: - if hasattr(_os,name): locals()[name] = _mk_single_with_return(name) + if hasattr(_os, name): + locals()[name] = _mk_single_with_return(name) def _mk_query(name): - original = getattr(_os,name) - def wrap(self,*args,**kw): - retval = original(*args,**kw) + original = getattr(_os, name) + + def wrap(self, *args, **kw): + retval = original(*args, **kw) if self._active: return self._remap_output(name, retval) return retval return wrap for name in ['getcwd', 'tmpnam']: - if hasattr(_os,name): locals()[name] = _mk_query(name) + if hasattr(_os, name): + locals()[name] = _mk_query(name) - def _validate_path(self,path): + def _validate_path(self, path): """Called to remap or validate any path, whether input or output""" return path - def _remap_input(self,operation,path,*args,**kw): + def _remap_input(self, operation, path, *args, **kw): """Called for path inputs""" return self._validate_path(path) - def _remap_output(self,operation,path): + def _remap_output(self, operation, path): """Called for path outputs""" return self._validate_path(path) - def _remap_pair(self,operation,src,dst,*args,**kw): + def _remap_pair(self, operation, src, dst, *args, **kw): """Called for path pairs like rename, link, and symlink operations""" return ( - self._remap_input(operation+'-from',src,*args,**kw), - self._remap_input(operation+'-to',dst,*args,**kw) + self._remap_input(operation + '-from', src, *args, **kw), + self._remap_input(operation + '-to', dst, *args, **kw) ) @@ -364,6 +375,7 @@ except ImportError: # it appears pywin32 is not installed, so no need to exclude. 
pass + class DirectorySandbox(AbstractSandbox): """Restrict operations to a single subdirectory - pseudo-chroot""" @@ -380,7 +392,7 @@ class DirectorySandbox(AbstractSandbox): def __init__(self, sandbox, exceptions=_EXCEPTIONS): self._sandbox = os.path.normcase(os.path.realpath(sandbox)) - self._prefix = os.path.join(self._sandbox,'') + self._prefix = os.path.join(self._sandbox, '') self._exceptions = [ os.path.normcase(os.path.realpath(path)) for path in exceptions @@ -395,12 +407,12 @@ class DirectorySandbox(AbstractSandbox): def _file(self, path, mode='r', *args, **kw): if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): self._violation("file", path, mode, *args, **kw) - return _file(path,mode,*args,**kw) + return _file(path, mode, *args, **kw) def _open(self, path, mode='r', *args, **kw): if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): self._violation("open", path, mode, *args, **kw) - return _open(path,mode,*args,**kw) + return _open(path, mode, *args, **kw) def tmpnam(self): self._violation("tmpnam") @@ -440,19 +452,21 @@ class DirectorySandbox(AbstractSandbox): """Called for path pairs like rename, link, and symlink operations""" if not self._ok(src) or not self._ok(dst): self._violation(operation, src, dst, *args, **kw) - return (src,dst) + return (src, dst) def open(self, file, flags, mode=0o777, *args, **kw): """Called for low-level os.open()""" if flags & WRITE_FLAGS and not self._ok(file): self._violation("os.open", file, flags, mode, *args, **kw) - return _os.open(file,flags,mode, *args, **kw) + return _os.open(file, flags, mode, *args, **kw) + WRITE_FLAGS = functools.reduce( operator.or_, [getattr(_os, a, 0) for a in "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] ) + class SandboxViolation(DistutilsError): """A setup script attempted to modify the filesystem outside the sandbox""" @@ -468,29 +482,4 @@ script by hand. Please inform the package's author and the EasyInstall maintainers to find out if a fix or workaround is available.""" % self.args - - - - - - - - - - - - - - - - - - - - - - - - - # diff --git a/setuptools/site-patch.py b/setuptools/site-patch.py index c2168019..f09ab522 100644 --- a/setuptools/site-patch.py +++ b/setuptools/site-patch.py @@ -2,18 +2,17 @@ def __boot(): import sys import os PYTHONPATH = os.environ.get('PYTHONPATH') - if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): + if PYTHONPATH is None or (sys.platform == 'win32' and not PYTHONPATH): PYTHONPATH = [] else: PYTHONPATH = PYTHONPATH.split(os.pathsep) - pic = getattr(sys,'path_importer_cache',{}) + pic = getattr(sys, 'path_importer_cache', {}) stdpath = sys.path[len(PYTHONPATH):] mydir = os.path.dirname(__file__) - #print "searching",stdpath,sys.path for item in stdpath: - if item==mydir or not item: + if item == mydir or not item: continue # skip if current dir. 
on Windows, or my own directory importer = pic.get(item) if importer is not None: @@ -24,26 +23,24 @@ def __boot(): break else: try: - import imp # Avoid import loop in Python >= 3.3 - stream, path, descr = imp.find_module('site',[item]) + import imp # Avoid import loop in Python >= 3.3 + stream, path, descr = imp.find_module('site', [item]) except ImportError: continue if stream is None: continue try: # This should actually reload the current module - imp.load_module('site',stream,path,descr) + imp.load_module('site', stream, path, descr) finally: stream.close() break else: raise ImportError("Couldn't find the real 'site' module") - #print "loaded", __file__ + known_paths = dict([(makepath(item)[1], 1) for item in sys.path]) # 2.2 comp - known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp - - oldpos = getattr(sys,'__egginsert',0) # save old insertion position + oldpos = getattr(sys, '__egginsert', 0) # save old insertion position sys.__egginsert = 0 # and reset the current one for item in PYTHONPATH: @@ -58,7 +55,7 @@ def __boot(): for item in sys.path: p, np = makepath(item) - if np==nd and insert_at is None: + if np == nd and insert_at is None: # We've hit the first 'system' path entry, so added entries go here insert_at = len(new_path) @@ -71,6 +68,7 @@ def __boot(): sys.path[:] = new_path -if __name__=='site': + +if __name__ == 'site': __boot() del __boot diff --git a/setuptools/ssl_support.py b/setuptools/ssl_support.py index 657197cf..f4ba8a92 100644 --- a/setuptools/ssl_support.py +++ b/setuptools/ssl_support.py @@ -161,6 +161,7 @@ class VerifyingHTTPSHandler(HTTPSHandler): class VerifyingHTTPSConn(HTTPSConnection): """Simple verifying connection: no auth, subclasses, timeouts, etc.""" + def __init__(self, host, ca_bundle, **kw): HTTPSConnection.__init__(self, host, **kw) self.ca_bundle = ca_bundle @@ -192,6 +193,7 @@ class VerifyingHTTPSConn(HTTPSConnection): self.sock.close() raise + def opener_for(ca_bundle=None): """Get a urlopen() replacement that uses ca_bundle for verification""" return urllib.request.build_opener( @@ -201,6 +203,7 @@ def opener_for(ca_bundle=None): _wincerts = None + def get_win_certfile(): global _wincerts if _wincerts is not None: @@ -212,6 +215,7 @@ def get_win_certfile(): return None class MyCertFile(CertFile): + def __init__(self, stores=(), certs=()): CertFile.__init__(self) for store in stores: @@ -231,7 +235,7 @@ def get_win_certfile(): def find_ca_bundle(): """Return an existing CA bundle path, or None""" - if os.name=='nt': + if os.name == 'nt': return get_win_certfile() else: for cert_path in cert_paths: diff --git a/setuptools/tests/__init__.py b/setuptools/tests/__init__.py index 32447356..53bd836c 100644 --- a/setuptools/tests/__init__.py +++ b/setuptools/tests/__init__.py @@ -27,7 +27,7 @@ def makeSetup(**args): distutils.core._setup_stop_after = "commandline" # Don't let system command line leak into tests! 
- args.setdefault('script_args',['install']) + args.setdefault('script_args', ['install']) try: return setuptools.setup(**args) @@ -40,6 +40,7 @@ needs_bytecode = pytest.mark.skipif( reason="bytecode support not available", ) + class TestDepends: def testExtractConst(self): @@ -55,35 +56,35 @@ class TestDepends: fc = six.get_function_code(f1) # unrecognized name - assert dep.extract_constant(fc,'q', -1) is None + assert dep.extract_constant(fc, 'q', -1) is None # constant assigned - dep.extract_constant(fc,'x', -1) == "test" + dep.extract_constant(fc, 'x', -1) == "test" # expression assigned - dep.extract_constant(fc,'y', -1) == -1 + dep.extract_constant(fc, 'y', -1) == -1 # recognized name, not assigned - dep.extract_constant(fc,'z', -1) is None + dep.extract_constant(fc, 'z', -1) is None def testFindModule(self): with pytest.raises(ImportError): dep.find_module('no-such.-thing') with pytest.raises(ImportError): dep.find_module('setuptools.non-existent') - f,p,i = dep.find_module('setuptools.tests') + f, p, i = dep.find_module('setuptools.tests') f.close() @needs_bytecode def testModuleExtract(self): from email import __version__ - assert dep.get_module_constant('email','__version__') == __version__ - assert dep.get_module_constant('sys','version') == sys.version - assert dep.get_module_constant('setuptools.tests','__doc__') == __doc__ + assert dep.get_module_constant('email', '__version__') == __version__ + assert dep.get_module_constant('sys', 'version') == sys.version + assert dep.get_module_constant('setuptools.tests', '__doc__') == __doc__ @needs_bytecode def testRequire(self): - req = Require('Email','1.0.3','email') + req = Require('Email', '1.0.3', 'email') assert req.name == 'Email' assert req.module == 'email' @@ -100,12 +101,12 @@ class TestDepends: assert req.is_present() assert req.is_current() - req = Require('Email 3000','03000','email',format=LooseVersion) + req = Require('Email 3000', '03000', 'email', format=LooseVersion) assert req.is_present() assert not req.is_current() assert not req.version_ok('unknown') - req = Require('Do-what-I-mean','1.0','d-w-i-m') + req = Require('Do-what-I-mean', '1.0', 'd-w-i-m') assert not req.is_present() assert not req.is_current() @@ -124,22 +125,22 @@ class TestDepends: class TestDistro: def setup_method(self, method): - self.e1 = Extension('bar.ext',['bar.c']) + self.e1 = Extension('bar.ext', ['bar.c']) self.e2 = Extension('c.y', ['y.c']) self.dist = makeSetup( packages=['a', 'a.b', 'a.b.c', 'b', 'c'], - py_modules=['b.d','x'], - ext_modules = (self.e1, self.e2), - package_dir = {}, + py_modules=['b.d', 'x'], + ext_modules=(self.e1, self.e2), + package_dir={}, ) def testDistroType(self): - assert isinstance(self.dist,setuptools.dist.Distribution) + assert isinstance(self.dist, setuptools.dist.Distribution) def testExcludePackage(self): self.dist.exclude_package('a') - assert self.dist.packages == ['b','c'] + assert self.dist.packages == ['b', 'c'] self.dist.exclude_package('b') assert self.dist.packages == ['c'] @@ -168,7 +169,7 @@ class TestDistro: assert self.dist.ext_modules == [self.e2, self.e1] def testExcludePackages(self): - self.dist.exclude(packages=['c','b','a']) + self.dist.exclude(packages=['c', 'b', 'a']) assert self.dist.packages == [] assert self.dist.py_modules == ['x'] assert self.dist.ext_modules == [self.e1] @@ -198,13 +199,13 @@ class TestDistro: with pytest.raises(DistutilsSetupError): self.dist.exclude(nonexistent_option='x') with pytest.raises(DistutilsSetupError): - self.dist.include(packages={'x':'y'}) + 
self.dist.include(packages={'x': 'y'}) with pytest.raises(DistutilsSetupError): - self.dist.exclude(packages={'x':'y'}) + self.dist.exclude(packages={'x': 'y'}) with pytest.raises(DistutilsSetupError): - self.dist.include(ext_modules={'x':'y'}) + self.dist.include(ext_modules={'x': 'y'}) with pytest.raises(DistutilsSetupError): - self.dist.exclude(ext_modules={'x':'y'}) + self.dist.exclude(ext_modules={'x': 'y'}) with pytest.raises(DistutilsSetupError): self.dist.include(package_dir=['q']) @@ -215,31 +216,31 @@ class TestDistro: class TestFeatures: def setup_method(self, method): - self.req = Require('Distutils','1.0.3','distutils') + self.req = Require('Distutils', '1.0.3', 'distutils') self.dist = makeSetup( features={ - 'foo': Feature("foo",standard=True,require_features=['baz',self.req]), - 'bar': Feature("bar", standard=True, packages=['pkg.bar'], + 'foo': Feature("foo", standard=True, require_features=['baz', self.req]), + 'bar': Feature("bar", standard=True, packages=['pkg.bar'], py_modules=['bar_et'], remove=['bar.ext'], - ), + ), 'baz': Feature( "baz", optional=False, packages=['pkg.baz'], - scripts = ['scripts/baz_it'], - libraries=[('libfoo','foo/foofoo.c')] + scripts=['scripts/baz_it'], + libraries=[('libfoo', 'foo/foofoo.c')] ), 'dwim': Feature("DWIM", available=False, remove='bazish'), }, script_args=['--without-bar', 'install'], - packages = ['pkg.bar', 'pkg.foo'], - py_modules = ['bar_et', 'bazish'], - ext_modules = [Extension('bar.ext',['bar.c'])] + packages=['pkg.bar', 'pkg.foo'], + py_modules=['bar_et', 'bazish'], + ext_modules=[Extension('bar.ext', ['bar.c'])] ) def testDefaults(self): assert not Feature( - "test",standard=True,remove='x',available=False + "test", standard=True, remove='x', available=False ).include_by_default() - assert Feature("test",standard=True,remove='x').include_by_default() + assert Feature("test", standard=True, remove='x').include_by_default() # Feature must have either kwargs, removes, or require_features with pytest.raises(DistutilsSetupError): Feature("test") @@ -251,32 +252,32 @@ class TestFeatures: def testFeatureOptions(self): dist = self.dist assert ( - ('with-dwim',None,'include DWIM') in dist.feature_options + ('with-dwim', None, 'include DWIM') in dist.feature_options ) assert ( - ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options + ('without-dwim', None, 'exclude DWIM (default)') in dist.feature_options ) assert ( - ('with-bar',None,'include bar (default)') in dist.feature_options + ('with-bar', None, 'include bar (default)') in dist.feature_options ) assert ( - ('without-bar',None,'exclude bar') in dist.feature_options + ('without-bar', None, 'exclude bar') in dist.feature_options ) assert dist.feature_negopt['without-foo'] == 'with-foo' assert dist.feature_negopt['without-bar'] == 'with-bar' assert dist.feature_negopt['without-dwim'] == 'with-dwim' - assert (not 'without-baz' in dist.feature_negopt) + assert ('without-baz' not in dist.feature_negopt) def testUseFeatures(self): dist = self.dist assert dist.with_foo == 1 assert dist.with_bar == 0 assert dist.with_baz == 1 - assert (not 'bar_et' in dist.py_modules) - assert (not 'pkg.bar' in dist.packages) + assert ('bar_et' not in dist.py_modules) + assert ('pkg.bar' not in dist.packages) assert ('pkg.baz' in dist.packages) assert ('scripts/baz_it' in dist.scripts) - assert (('libfoo','foo/foofoo.c') in dist.libraries) + assert (('libfoo', 'foo/foofoo.c') in dist.libraries) assert dist.ext_modules == [] assert dist.require_features == [self.req] @@ -287,7 +288,8 
@@ class TestFeatures: def testFeatureWithInvalidRemove(self): with pytest.raises(SystemExit): - makeSetup(features={'x':Feature('x', remove='y')}) + makeSetup(features={'x': Feature('x', remove='y')}) + class TestCommandTests: @@ -296,7 +298,7 @@ class TestCommandTests: assert (isinstance(test_cmd, distutils.cmd.Command)) def testLongOptSuiteWNoDefault(self): - ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite']) + ts1 = makeSetup(script_args=['test', '--test-suite=foo.tests.suite']) ts1 = ts1.get_command_obj('test') ts1.ensure_finalized() assert ts1.test_suite == 'foo.tests.suite' @@ -309,7 +311,7 @@ class TestCommandTests: def testDefaultWModuleOnCmdLine(self): ts3 = makeSetup( test_suite='bar.tests', - script_args=['test','-m','foo.tests'] + script_args=['test', '-m', 'foo.tests'] ).get_command_obj('test') ts3.ensure_finalized() assert ts3.test_module == 'foo.tests' @@ -317,7 +319,7 @@ class TestCommandTests: def testConflictingOptions(self): ts4 = makeSetup( - script_args=['test','-m','bar.tests', '-s','foo.tests.suite'] + script_args=['test', '-m', 'bar.tests', '-s', 'foo.tests.suite'] ).get_command_obj('test') with pytest.raises(DistutilsOptionError): ts4.ensure_finalized() @@ -325,4 +327,4 @@ class TestCommandTests: def testNoSuite(self): ts5 = makeSetup().get_command_obj('test') ts5.ensure_finalized() - assert ts5.test_suite == None + assert ts5.test_suite is None diff --git a/setuptools/tests/contexts.py b/setuptools/tests/contexts.py index ae28c7c3..535ae107 100644 --- a/setuptools/tests/contexts.py +++ b/setuptools/tests/contexts.py @@ -10,7 +10,7 @@ import pkg_resources @contextlib.contextmanager -def tempdir(cd=lambda dir:None, **kwargs): +def tempdir(cd=lambda dir: None, **kwargs): temp_dir = tempfile.mkdtemp(**kwargs) orig_dir = os.getcwd() try: diff --git a/setuptools/tests/environment.py b/setuptools/tests/environment.py index a23c0504..b0e3bd36 100644 --- a/setuptools/tests/environment.py +++ b/setuptools/tests/environment.py @@ -25,11 +25,11 @@ def run_setup_py(cmd, pypath=None, path=None, for envname in os.environ: env[envname] = os.environ[envname] - #override the python path if needed + # override the python path if needed if pypath is not None: env["PYTHONPATH"] = pypath - #overide the execution path if needed + # overide the execution path if needed if path is not None: env["PATH"] = path if not env.get("PATH", ""): @@ -50,11 +50,11 @@ def run_setup_py(cmd, pypath=None, path=None, except OSError: return 1, '' - #decode the console string if needed - if hasattr(data, "decode"): + # decode the console string if needed + if hasattr(data, "decode"): # use the default encoding data = data.decode() data = unicodedata.normalize('NFC', data) - #communciate calls wait() + # communciate calls wait() return proc.returncode, data diff --git a/setuptools/tests/fixtures.py b/setuptools/tests/fixtures.py index c70c38cb..5204c8d1 100644 --- a/setuptools/tests/fixtures.py +++ b/setuptools/tests/fixtures.py @@ -1,24 +1,20 @@ -try: - from unittest import mock -except ImportError: - import mock import pytest from . import contexts @pytest.yield_fixture -def user_override(): +def user_override(monkeypatch): """ Override site.USER_BASE and site.USER_SITE with temporary directories in a context. 
""" with contexts.tempdir() as user_base: - with mock.patch('site.USER_BASE', user_base): - with contexts.tempdir() as user_site: - with mock.patch('site.USER_SITE', user_site): - with contexts.save_user_site_setting(): - yield + monkeypatch.setattr('site.USER_BASE', user_base) + with contexts.tempdir() as user_site: + monkeypatch.setattr('site.USER_SITE', user_site) + with contexts.save_user_site_setting(): + yield @pytest.yield_fixture diff --git a/setuptools/tests/py26compat.py b/setuptools/tests/py26compat.py index 7211f275..18cece05 100644 --- a/setuptools/tests/py26compat.py +++ b/setuptools/tests/py26compat.py @@ -2,12 +2,14 @@ import sys import tarfile import contextlib + def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ return contextlib.closing(tarfile.open(*args, **kwargs)) + if sys.version_info[:2] < (2, 7) or (3, 0) <= sys.version_info[:2] < (3, 2): tarfile_open = _tarfile_open_ex else: diff --git a/setuptools/tests/server.py b/setuptools/tests/server.py index 6a687937..9e5fefb7 100644 --- a/setuptools/tests/server.py +++ b/setuptools/tests/server.py @@ -18,6 +18,7 @@ class IndexServer(BaseHTTPServer.HTTPServer): # The index files should be located in setuptools/tests/indexes s.stop() """ + def __init__(self, server_address=('', 0), RequestHandlerClass=SimpleHTTPServer.SimpleHTTPRequestHandler): BaseHTTPServer.HTTPServer.__init__(self, server_address, @@ -42,16 +43,20 @@ class IndexServer(BaseHTTPServer.HTTPServer): port = self.server_port return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port + class RequestRecorder(BaseHTTPServer.BaseHTTPRequestHandler): + def do_GET(self): requests = vars(self.server).setdefault('requests', []) requests.append(self) self.send_response(200, 'OK') + class MockServer(BaseHTTPServer.HTTPServer, threading.Thread): """ A simple HTTP Server that records the requests made to it. """ + def __init__(self, server_address=('', 0), RequestHandlerClass=RequestRecorder): BaseHTTPServer.HTTPServer.__init__(self, server_address, diff --git a/setuptools/tests/test_archive_util.py b/setuptools/tests/test_archive_util.py new file mode 100644 index 00000000..b789e9ac --- /dev/null +++ b/setuptools/tests/test_archive_util.py @@ -0,0 +1,42 @@ +# coding: utf-8 + +import tarfile +import io + +from setuptools.extern import six + +import pytest + +from setuptools import archive_util + + +@pytest.fixture +def tarfile_with_unicode(tmpdir): + """ + Create a tarfile containing only a file whose name is + a zero byte file called testimäge.png. + """ + tarobj = io.BytesIO() + + with tarfile.open(fileobj=tarobj, mode="w:gz") as tgz: + data = b"" + + filename = "testimäge.png" + if six.PY2: + filename = filename.decode('utf-8') + + t = tarfile.TarInfo(filename) + t.size = len(data) + + tgz.addfile(t, io.BytesIO(data)) + + target = tmpdir / 'unicode-pkg-1.0.tar.gz' + with open(str(target), mode='wb') as tf: + tf.write(tarobj.getvalue()) + return str(target) + + +@pytest.mark.xfail(reason="#710 and #712") +def test_unicode_files(tarfile_with_unicode, tmpdir): + target = tmpdir / 'out' + archive_util.unpack_archive(tarfile_with_unicode, six.text_type(target)) diff --git a/setuptools/tests/test_bdist_egg.py b/setuptools/tests/test_bdist_egg.py index ccfb2ea7..c77aa226 100644 --- a/setuptools/tests/test_bdist_egg.py +++ b/setuptools/tests/test_bdist_egg.py @@ -9,30 +9,33 @@ from setuptools.dist import Distribution from . 
import contexts + SETUP_PY = """\ from setuptools import setup setup(name='foo', py_modules=['hi']) """ + @pytest.yield_fixture def setup_context(tmpdir): - with (tmpdir/'setup.py').open('w') as f: + with (tmpdir / 'setup.py').open('w') as f: f.write(SETUP_PY) - with (tmpdir/'hi.py').open('w') as f: + with (tmpdir / 'hi.py').open('w') as f: f.write('1\n') with tmpdir.as_cwd(): yield tmpdir class Test: + def test_bdist_egg(self, setup_context, user_override): dist = Distribution(dict( script_name='setup.py', script_args=['bdist_egg'], name='foo', py_modules=['hi'] - )) + )) os.makedirs(os.path.join('build', 'src')) with contexts.quiet(): dist.parse_command_line() diff --git a/setuptools/tests/test_build_ext.py b/setuptools/tests/test_build_ext.py index 0719ba44..ac002f44 100644 --- a/setuptools/tests/test_build_ext.py +++ b/setuptools/tests/test_build_ext.py @@ -1,9 +1,16 @@ +import sys import distutils.command.build_ext as orig +from distutils.sysconfig import get_config_var -from setuptools.command.build_ext import build_ext +from setuptools.extern import six + +from setuptools.command.build_ext import build_ext, get_abi3_suffix from setuptools.dist import Distribution +from setuptools.extension import Extension + class TestBuildExt: + def test_get_ext_filename(self): """ Setuptools needs to give back the same @@ -16,3 +23,24 @@ class TestBuildExt: res = cmd.get_ext_filename('foo') wanted = orig.build_ext.get_ext_filename(cmd, 'foo') assert res == wanted + + def test_abi3_filename(self): + """ + Filename needs to be loadable by several versions + of Python 3 if 'is_abi3' is truthy on Extension() + """ + print(get_abi3_suffix()) + + extension = Extension('spam.eggs', ['eggs.c'], py_limited_api=True) + dist = Distribution(dict(ext_modules=[extension])) + cmd = build_ext(dist) + cmd.finalize_options() + assert 'spam.eggs' in cmd.ext_map + res = cmd.get_ext_filename('spam.eggs') + + if six.PY2 or not get_abi3_suffix(): + assert res.endswith(get_config_var('SO')) + elif sys.platform == 'win32': + assert res.endswith('eggs.pyd') + else: + assert 'abi3' in res diff --git a/setuptools/tests/test_build_py.py b/setuptools/tests/test_build_py.py index ed1703ac..cc701ae6 100644 --- a/setuptools/tests/test_build_py.py +++ b/setuptools/tests/test_build_py.py @@ -26,6 +26,5 @@ def test_directories_in_package_data_glob(tmpdir_as_cwd): package_data={'': ['path/*']}, )) os.makedirs('path/subpath') - #with contexts.quiet(): dist.parse_command_line() dist.run_commands() diff --git a/setuptools/tests/test_develop.py b/setuptools/tests/test_develop.py index 1b844499..f1580785 100644 --- a/setuptools/tests/test_develop.py +++ b/setuptools/tests/test_develop.py @@ -26,6 +26,7 @@ setup(name='foo', INIT_PY = """print "foo" """ + @pytest.yield_fixture def temp_user(monkeypatch): with contexts.tempdir() as user_base: @@ -54,6 +55,7 @@ def test_env(tmpdir, temp_user): class TestDevelop: in_virtualenv = hasattr(sys, 'real_prefix') in_venv = hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix + @pytest.mark.skipif(in_virtualenv or in_venv, reason="Cannot run when invoked in a virtualenv or venv") def test_2to3_user_mode(self, test_env): @@ -112,4 +114,4 @@ class TestDevelop: cmd.ensure_finalized() cmd.install_dir = tmpdir cmd.run() - #assert '0.0' not in foocmd_text + # assert '0.0' not in foocmd_text diff --git a/setuptools/tests/test_dist_info.py b/setuptools/tests/test_dist_info.py index 9f226a55..f7e7d2bf 100644 --- a/setuptools/tests/test_dist_info.py +++ b/setuptools/tests/test_dist_info.py @@ -1,8 +1,7 
@@ """Test .dist-info style distributions. """ -import os -import shutil -import tempfile + +from __future__ import unicode_literals from setuptools.extern.six.moves import map @@ -14,10 +13,48 @@ from .textwrap import DALS class TestDistInfo: - def test_distinfo(self): + metadata_base = DALS(""" + Metadata-Version: 1.2 + Requires-Dist: splort (==4) + Provides-Extra: baz + Requires-Dist: quux (>=1.1); extra == 'baz' + """) + + @classmethod + def build_metadata(cls, **kwargs): + lines = ( + '{key}: {value}\n'.format(**locals()) + for key, value in kwargs.items() + ) + return cls.metadata_base + ''.join(lines) + + @pytest.fixture + def metadata(self, tmpdir): + dist_info_name = 'VersionedDistribution-2.718.dist-info' + versioned = tmpdir / dist_info_name + versioned.mkdir() + filename = versioned / 'METADATA' + content = self.build_metadata( + Name='VersionedDistribution', + ) + filename.write_text(content, encoding='utf-8') + + dist_info_name = 'UnversionedDistribution.dist-info' + unversioned = tmpdir / dist_info_name + unversioned.mkdir() + filename = unversioned / 'METADATA' + content = self.build_metadata( + Name='UnversionedDistribution', + Version='0.3', + ) + filename.write_text(content, encoding='utf-8') + + return str(tmpdir) + + def test_distinfo(self, metadata): dists = dict( (d.project_name, d) - for d in pkg_resources.find_distributions(self.tmpdir) + for d in pkg_resources.find_distributions(metadata) ) assert len(dists) == 2, dists @@ -25,49 +62,17 @@ class TestDistInfo: unversioned = dists['UnversionedDistribution'] versioned = dists['VersionedDistribution'] - assert versioned.version == '2.718' # from filename - assert unversioned.version == '0.3' # from METADATA + assert versioned.version == '2.718' # from filename + assert unversioned.version == '0.3' # from METADATA - def test_conditional_dependencies(self): + def test_conditional_dependencies(self, metadata): specs = 'splort==4', 'quux>=1.1' requires = list(map(pkg_resources.Requirement.parse, specs)) - for d in pkg_resources.find_distributions(self.tmpdir): + for d in pkg_resources.find_distributions(metadata): assert d.requires() == requires[:1] assert d.requires(extras=('baz',)) == [ requires[0], - pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"')] + pkg_resources.Requirement.parse('quux>=1.1;extra=="baz"'), + ] assert d.extras == ['baz'] - - metadata_template = DALS(""" - Metadata-Version: 1.2 - Name: {name} - {version} - Requires-Dist: splort (==4) - Provides-Extra: baz - Requires-Dist: quux (>=1.1); extra == 'baz' - """) - - def setup_method(self, method): - self.tmpdir = tempfile.mkdtemp() - dist_info_name = 'VersionedDistribution-2.718.dist-info' - versioned = os.path.join(self.tmpdir, dist_info_name) - os.mkdir(versioned) - with open(os.path.join(versioned, 'METADATA'), 'w+') as metadata_file: - metadata = self.metadata_template.format( - name='VersionedDistribution', - version='', - ).replace('\n\n', '\n') - metadata_file.write(metadata) - dist_info_name = 'UnversionedDistribution.dist-info' - unversioned = os.path.join(self.tmpdir, dist_info_name) - os.mkdir(unversioned) - with open(os.path.join(unversioned, 'METADATA'), 'w+') as metadata_file: - metadata = self.metadata_template.format( - name='UnversionedDistribution', - version='Version: 0.3', - ) - metadata_file.write(metadata) - - def teardown_method(self, method): - shutil.rmtree(self.tmpdir) diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index fd06b6ef..82e1d7e8 100644 --- 
a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -14,9 +14,10 @@ import logging import itertools import distutils.errors import io +import zipfile -from setuptools.extern.six.moves import urllib import time +from setuptools.extern.six.moves import urllib import pytest try: @@ -30,7 +31,7 @@ import setuptools.command.easy_install as ei from setuptools.command.easy_install import PthDistributions from setuptools.command import easy_install as easy_install_pkg from setuptools.dist import Distribution -from pkg_resources import working_set +from pkg_resources import normalize_path, working_set from pkg_resources import Distribution as PRDistribution import setuptools.tests.server import pkg_resources @@ -41,6 +42,7 @@ from .textwrap import DALS class FakeDist(object): + def get_entry_map(self, group): if group != 'console_scripts': return {} @@ -49,12 +51,14 @@ class FakeDist(object): def as_requirement(self): return 'spec' + SETUP_PY = DALS(""" from setuptools import setup setup(name='foo') """) + class TestEasyInstallTest: def test_install_site_py(self, tmpdir): @@ -70,10 +74,12 @@ class TestEasyInstallTest: expected = header + DALS(""" # EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name' __requires__ = 'spec' + import re import sys from pkg_resources import load_entry_point if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) sys.exit( load_entry_point('spec', 'console_scripts', 'name')() ) @@ -123,16 +129,72 @@ class TestEasyInstallTest: get_site_dirs should always return site dirs reported by site.getsitepackages. """ - mock_gsp = lambda: ['/setuptools/test/site-packages'] + path = normalize_path('/setuptools/test/site-packages') + mock_gsp = lambda: [path] monkeypatch.setattr(site, 'getsitepackages', mock_gsp, raising=False) - assert '/setuptools/test/site-packages' in ei.get_site_dirs() + assert path in ei.get_site_dirs() def test_all_site_dirs_works_without_getsitepackages(self, monkeypatch): monkeypatch.delattr(site, 'getsitepackages', raising=False) assert ei.get_site_dirs() + @pytest.fixture + def sdist_unicode(self, tmpdir): + files = [ + ( + 'setup.py', + DALS(""" + import setuptools + setuptools.setup( + name="setuptools-test-unicode", + version="1.0", + packages=["mypkg"], + include_package_data=True, + ) + """), + ), + ( + 'mypkg/__init__.py', + "", + ), + ( + u'mypkg/\u2603.txt', + "", + ), + ] + sdist_name = 'setuptools-test-unicode-1.0.zip' + sdist = tmpdir / sdist_name + # can't use make_sdist, because the issue only occurs + # with zip sdists. + sdist_zip = zipfile.ZipFile(str(sdist), 'w') + for filename, content in files: + sdist_zip.writestr(filename, content) + sdist_zip.close() + return str(sdist) + + @pytest.mark.xfail(reason="#709 and #710") + # also + #@pytest.mark.xfail(setuptools.tests.is_ascii, + # reason="https://github.com/pypa/setuptools/issues/706") + def test_unicode_filename_in_sdist(self, sdist_unicode, tmpdir, monkeypatch): + """ + The install command should execute correctly even if + the package has unicode filenames. 
+ """ + dist = Distribution({'script_args': ['easy_install']}) + target = (tmpdir / 'target').ensure_dir() + cmd = ei.easy_install( + dist, + install_dir=str(target), + args=['x'], + ) + monkeypatch.setitem(os.environ, 'PYTHONPATH', str(target)) + cmd.ensure_finalized() + cmd.easy_install(sdist_unicode) + class TestPTHFileWriter: + def test_add_from_cwd_site_sets_dirty(self): '''a pth file manager should set dirty if a distribution is in site but also the cwd @@ -154,7 +216,7 @@ class TestPTHFileWriter: @pytest.yield_fixture def setup_context(tmpdir): - with (tmpdir/'setup.py').open('w') as f: + with (tmpdir / 'setup.py').open('w') as f: f.write(SETUP_PY) with tmpdir.as_cwd(): yield tmpdir @@ -266,6 +328,7 @@ def distutils_package(): class TestDistutilsPackage: + def test_bdist_egg_available_on_distutils_pkg(self, distutils_package): run_setup('setup.py', ['bdist_egg']) @@ -527,36 +590,40 @@ def make_trivial_sdist(dist_path, setup_py): dist.addfile(setup_py_file, fileobj=setup_py_bytes) +@pytest.mark.skipif( + sys.platform.startswith('java') and ei.is_sh(sys.executable), + reason="Test cannot run under java when executable is sh" +) class TestScriptHeader: non_ascii_exe = '/Users/José/bin/python' exe_with_spaces = r'C:\Program Files\Python33\python.exe' - @pytest.mark.skipif( - sys.platform.startswith('java') and ei.is_sh(sys.executable), - reason="Test cannot run under java when executable is sh" - ) def test_get_script_header(self): expected = '#!%s\n' % ei.nt_quote_arg(os.path.normpath(sys.executable)) actual = ei.ScriptWriter.get_script_header('#!/usr/local/bin/python') assert actual == expected + def test_get_script_header_args(self): expected = '#!%s -x\n' % ei.nt_quote_arg(os.path.normpath (sys.executable)) actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python -x') assert actual == expected + def test_get_script_header_non_ascii_exe(self): actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python', executable=self.non_ascii_exe) expected = '#!%s -x\n' % self.non_ascii_exe assert actual == expected + def test_get_script_header_exe_with_spaces(self): actual = ei.ScriptWriter.get_script_header('#!/usr/bin/python', - executable='"'+self.exe_with_spaces+'"') + executable='"' + self.exe_with_spaces + '"') expected = '#!"%s"\n' % self.exe_with_spaces assert actual == expected class TestCommandSpec: + def test_custom_launch_command(self): """ Show how a custom CommandSpec could be used to specify a #! executable @@ -590,17 +657,9 @@ class TestCommandSpec: assert len(cmd) == 2 assert '"' not in cmd.as_header() - def test_sys_executable(self): - """ - CommandSpec.from_string(sys.executable) should contain just that param. 
- """ - writer = ei.ScriptWriter.best() - cmd = writer.command_spec_class.from_string(sys.executable) - assert len(cmd) == 1 - assert cmd[0] == sys.executable - class TestWindowsScriptWriter: + def test_header(self): hdr = ei.WindowsScriptWriter.get_script_header('') assert hdr.startswith('#!') diff --git a/setuptools/tests/test_egg_info.py b/setuptools/tests/test_egg_info.py index 3a0db58f..dff2a8c8 100644 --- a/setuptools/tests/test_egg_info.py +++ b/setuptools/tests/test_egg_info.py @@ -179,7 +179,7 @@ class TestEggInfo(object): """ % requires_line) build_files({ 'setup.py': setup_script, - }) + }) def test_install_requires_with_markers(self, tmpdir_cwd, env): self._setup_script_with_requires( @@ -210,6 +210,32 @@ class TestEggInfo(object): self._run_install_command(tmpdir_cwd, env) assert glob.glob(os.path.join(env.paths['lib'], 'barbazquux*')) == [] + def test_python_requires_egg_info(self, tmpdir_cwd, env): + self._setup_script_with_requires( + """python_requires='>=2.7.12',""") + environ = os.environ.copy().update( + HOME=env.paths['home'], + ) + code, data = environment.run_setup_py( + cmd=['egg_info'], + pypath=os.pathsep.join([env.paths['lib'], str(tmpdir_cwd)]), + data_stream=1, + env=environ, + ) + egg_info_dir = os.path.join('.', 'foo.egg-info') + with open(os.path.join(egg_info_dir, 'PKG-INFO')) as pkginfo_file: + pkg_info_lines = pkginfo_file.read().split('\n') + assert 'Requires-Python: >=2.7.12' in pkg_info_lines + assert 'Metadata-Version: 1.2' in pkg_info_lines + + def test_python_requires_install(self, tmpdir_cwd, env): + self._setup_script_with_requires( + """python_requires='>=1.2.3',""") + self._run_install_command(tmpdir_cwd, env) + egg_info_dir = self._find_egg_info_files(env.paths['lib']).base + pkginfo = os.path.join(egg_info_dir, 'PKG-INFO') + assert 'Requires-Python: >=1.2.3' in open(pkginfo).read().split('\n') + def _run_install_command(self, tmpdir_cwd, env, cmd=None, output=None): environ = os.environ.copy().update( HOME=env.paths['home'], @@ -235,6 +261,7 @@ class TestEggInfo(object): def _find_egg_info_files(self, root): class DirList(list): + def __init__(self, files, base): super(DirList, self).__init__(files) self.base = base diff --git a/setuptools/tests/test_find_packages.py b/setuptools/tests/test_find_packages.py index 06a7c02e..9d31ccd7 100644 --- a/setuptools/tests/test_find_packages.py +++ b/setuptools/tests/test_find_packages.py @@ -13,6 +13,8 @@ from setuptools import find_packages find_420_packages = setuptools.PEP420PackageFinder.find # modeled after CPython's test.support.can_symlink + + def can_symlink(): TESTFN = tempfile.mktemp() symlink_path = TESTFN + "can_symlink" @@ -26,13 +28,15 @@ def can_symlink(): globals().update(can_symlink=lambda: can) return can + def has_symlink(): bad_symlink = ( # Windows symlink directory detection is broken on Python 3.2 - platform.system() == 'Windows' and sys.version_info[:2] == (3,2) + platform.system() == 'Windows' and sys.version_info[:2] == (3, 2) ) return can_symlink() and not bad_symlink + class TestFindPackages: def setup_method(self, method): @@ -94,6 +98,15 @@ class TestFindPackages: packages = find_packages(self.dist_dir, exclude=('pkg.*',)) assert packages == ['pkg'] + def test_exclude_recursive(self): + """ + Excluding a parent package should exclude all child packages as well. 
+ """ + self._touch('__init__.py', self.pkg_dir) + self._touch('__init__.py', self.sub_pkg_dir) + packages = find_packages(self.dist_dir, exclude=('pkg',)) + assert packages == [] + def test_include_excludes_other(self): """ If include is specified, other packages should be excluded. diff --git a/setuptools/tests/test_install_scripts.py b/setuptools/tests/test_install_scripts.py new file mode 100644 index 00000000..7393241f --- /dev/null +++ b/setuptools/tests/test_install_scripts.py @@ -0,0 +1,88 @@ +"""install_scripts tests +""" + +import io +import sys + +import pytest + +from setuptools.command.install_scripts import install_scripts +from setuptools.dist import Distribution +from . import contexts + + +class TestInstallScripts: + settings = dict( + name='foo', + entry_points={'console_scripts': ['foo=foo:foo']}, + version='0.0', + ) + unix_exe = '/usr/dummy-test-path/local/bin/python' + unix_spaces_exe = '/usr/bin/env dummy-test-python' + win32_exe = 'C:\\Dummy Test Path\\Program Files\\Python 3.3\\python.exe' + + def _run_install_scripts(self, install_dir, executable=None): + dist = Distribution(self.settings) + dist.script_name = 'setup.py' + cmd = install_scripts(dist) + cmd.install_dir = install_dir + if executable is not None: + bs = cmd.get_finalized_command('build_scripts') + bs.executable = executable + cmd.ensure_finalized() + with contexts.quiet(): + cmd.run() + + @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only') + def test_sys_executable_escaping_unix(self, tmpdir, monkeypatch): + """ + Ensure that shebang is not quoted on Unix when getting the Python exe + from sys.executable. + """ + expected = '#!%s\n' % self.unix_exe + monkeypatch.setattr('sys.executable', self.unix_exe) + with tmpdir.as_cwd(): + self._run_install_scripts(str(tmpdir)) + with io.open(str(tmpdir.join('foo')), 'r') as f: + actual = f.readline() + assert actual == expected + + @pytest.mark.skipif(sys.platform != 'win32', reason='Windows only') + def test_sys_executable_escaping_win32(self, tmpdir, monkeypatch): + """ + Ensure that shebang is quoted on Windows when getting the Python exe + from sys.executable and it contains a space. + """ + expected = '#!"%s"\n' % self.win32_exe + monkeypatch.setattr('sys.executable', self.win32_exe) + with tmpdir.as_cwd(): + self._run_install_scripts(str(tmpdir)) + with io.open(str(tmpdir.join('foo-script.py')), 'r') as f: + actual = f.readline() + assert actual == expected + + @pytest.mark.skipif(sys.platform == 'win32', reason='non-Windows only') + def test_executable_with_spaces_escaping_unix(self, tmpdir): + """ + Ensure that shebang on Unix is not quoted, even when a value with spaces + is specified using --executable. + """ + expected = '#!%s\n' % self.unix_spaces_exe + with tmpdir.as_cwd(): + self._run_install_scripts(str(tmpdir), self.unix_spaces_exe) + with io.open(str(tmpdir.join('foo')), 'r') as f: + actual = f.readline() + assert actual == expected + + @pytest.mark.skipif(sys.platform != 'win32', reason='Windows only') + def test_executable_arg_escaping_win32(self, tmpdir): + """ + Ensure that shebang on Windows is quoted when getting a path with spaces + from --executable, that is itself properly quoted. 
+ """ + expected = '#!"%s"\n' % self.win32_exe + with tmpdir.as_cwd(): + self._run_install_scripts(str(tmpdir), '"' + self.win32_exe + '"') + with io.open(str(tmpdir.join('foo-script.py')), 'r') as f: + actual = f.readline() + assert actual == expected diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py new file mode 100644 index 00000000..6360270d --- /dev/null +++ b/setuptools/tests/test_manifest.py @@ -0,0 +1,476 @@ +# -*- coding: utf-8 -*- +"""sdist tests""" + +import contextlib +import os +import shutil +import sys +import tempfile +from distutils import log +from distutils.errors import DistutilsTemplateError + +from setuptools.command.egg_info import FileList, egg_info +from setuptools.dist import Distribution +from setuptools.extern import six +from setuptools.tests.textwrap import DALS + +import pytest + +py3_only = pytest.mark.xfail(six.PY2, reason="Test runs on Python 3 only") + + +def make_local_path(s): + """Converts '/' in a string to os.sep""" + return s.replace('/', os.sep) + + +SETUP_ATTRS = { + 'name': 'app', + 'version': '0.0', + 'packages': ['app'], +} + + +SETUP_PY = """\ +from setuptools import setup + +setup(**%r) +""" % SETUP_ATTRS + + +@contextlib.contextmanager +def quiet(): + old_stdout, old_stderr = sys.stdout, sys.stderr + sys.stdout, sys.stderr = six.StringIO(), six.StringIO() + try: + yield + finally: + sys.stdout, sys.stderr = old_stdout, old_stderr + + +def touch(filename): + open(filename, 'w').close() + + +# The set of files always in the manifest, including all files in the +# .egg-info directory +default_files = frozenset(map(make_local_path, [ + 'README.rst', + 'MANIFEST.in', + 'setup.py', + 'app.egg-info/PKG-INFO', + 'app.egg-info/SOURCES.txt', + 'app.egg-info/dependency_links.txt', + 'app.egg-info/top_level.txt', + 'app/__init__.py', +])) + + +class TempDirTestCase(object): + + def setup_method(self, method): + self.temp_dir = tempfile.mkdtemp() + self.old_cwd = os.getcwd() + os.chdir(self.temp_dir) + + def teardown_method(self, method): + os.chdir(self.old_cwd) + shutil.rmtree(self.temp_dir) + + +class TestManifestTest(TempDirTestCase): + + def setup_method(self, method): + super(TestManifestTest, self).setup_method(method) + + f = open(os.path.join(self.temp_dir, 'setup.py'), 'w') + f.write(SETUP_PY) + f.close() + + """ + Create a file tree like: + - LICENSE + - README.rst + - testing.rst + - .hidden.rst + - app/ + - __init__.py + - a.txt + - b.txt + - c.rst + - static/ + - app.js + - app.js.map + - app.css + - app.css.map + """ + + for fname in ['README.rst', '.hidden.rst', 'testing.rst', 'LICENSE']: + touch(os.path.join(self.temp_dir, fname)) + + # Set up the rest of the test package + test_pkg = os.path.join(self.temp_dir, 'app') + os.mkdir(test_pkg) + for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']: + touch(os.path.join(test_pkg, fname)) + + # Some compiled front-end assets to include + static = os.path.join(test_pkg, 'static') + os.mkdir(static) + for fname in ['app.js', 'app.js.map', 'app.css', 'app.css.map']: + touch(os.path.join(static, fname)) + + def make_manifest(self, contents): + """Write a MANIFEST.in.""" + with open(os.path.join(self.temp_dir, 'MANIFEST.in'), 'w') as f: + f.write(DALS(contents)) + + def get_files(self): + """Run egg_info and get all the files to include, as a set""" + dist = Distribution(SETUP_ATTRS) + dist.script_name = 'setup.py' + cmd = egg_info(dist) + cmd.ensure_finalized() + + cmd.run() + + return set(cmd.filelist.files) + + def test_no_manifest(self): + """Check a missing 
MANIFEST.in includes only the standard files.""" + assert (default_files - set(['MANIFEST.in'])) == self.get_files() + + def test_empty_files(self): + """Check an empty MANIFEST.in includes only the standard files.""" + self.make_manifest("") + assert default_files == self.get_files() + + def test_include(self): + """Include extra rst files in the project root.""" + self.make_manifest("include *.rst") + files = default_files | set([ + 'testing.rst', '.hidden.rst']) + assert files == self.get_files() + + def test_exclude(self): + """Include everything in app/ except the text files""" + l = make_local_path + self.make_manifest( + """ + include app/* + exclude app/*.txt + """) + files = default_files | set([l('app/c.rst')]) + assert files == self.get_files() + + def test_include_multiple(self): + """Include with multiple patterns.""" + l = make_local_path + self.make_manifest("include app/*.txt app/static/*") + files = default_files | set([ + l('app/a.txt'), l('app/b.txt'), + l('app/static/app.js'), l('app/static/app.js.map'), + l('app/static/app.css'), l('app/static/app.css.map')]) + assert files == self.get_files() + + def test_graft(self): + """Include the whole app/static/ directory.""" + l = make_local_path + self.make_manifest("graft app/static") + files = default_files | set([ + l('app/static/app.js'), l('app/static/app.js.map'), + l('app/static/app.css'), l('app/static/app.css.map')]) + assert files == self.get_files() + + def test_graft_global_exclude(self): + """Exclude all *.map files in the project.""" + l = make_local_path + self.make_manifest( + """ + graft app/static + global-exclude *.map + """) + files = default_files | set([ + l('app/static/app.js'), l('app/static/app.css')]) + assert files == self.get_files() + + def test_global_include(self): + """Include all *.rst, *.js, and *.css files in the whole tree.""" + l = make_local_path + self.make_manifest( + """ + global-include *.rst *.js *.css + """) + files = default_files | set([ + '.hidden.rst', 'testing.rst', l('app/c.rst'), + l('app/static/app.js'), l('app/static/app.css')]) + assert files == self.get_files() + + def test_graft_prune(self): + """Include all files in app/, except for the whole app/static/ dir.""" + l = make_local_path + self.make_manifest( + """ + graft app + prune app/static + """) + files = default_files | set([ + l('app/a.txt'), l('app/b.txt'), l('app/c.rst')]) + assert files == self.get_files() + + +class TestFileListTest(TempDirTestCase): + """ + A copy of the relevant bits of distutils/tests/test_filelist.py, + to ensure setuptools' version of FileList keeps parity with distutils. 
+ """ + + def setup_method(self, method): + super(TestFileListTest, self).setup_method(method) + self.threshold = log.set_threshold(log.FATAL) + self._old_log = log.Log._log + log.Log._log = self._log + self.logs = [] + + def teardown_method(self, method): + log.set_threshold(self.threshold) + log.Log._log = self._old_log + super(TestFileListTest, self).teardown_method(method) + + def _log(self, level, msg, args): + if level not in (log.DEBUG, log.INFO, log.WARN, log.ERROR, log.FATAL): + raise ValueError('%s wrong log level' % str(level)) + self.logs.append((level, msg, args)) + + def get_logs(self, *levels): + def _format(msg, args): + if len(args) == 0: + return msg + return msg % args + return [_format(msg, args) for level, msg, args + in self.logs if level in levels] + + def clear_logs(self): + self.logs = [] + + def assertNoWarnings(self): + assert self.get_logs(log.WARN) == [] + self.clear_logs() + + def assertWarnings(self): + assert len(self.get_logs(log.WARN)) > 0 + self.clear_logs() + + def make_files(self, files): + for file in files: + file = os.path.join(self.temp_dir, file) + dirname, basename = os.path.split(file) + if not os.path.exists(dirname): + os.makedirs(dirname) + open(file, 'w').close() + + def test_process_template_line(self): + # testing all MANIFEST.in template patterns + file_list = FileList() + l = make_local_path + + # simulated file list + self.make_files([ + 'foo.tmp', 'ok', 'xo', 'four.txt', + 'buildout.cfg', + # filelist does not filter out VCS directories, + # it's sdist that does + l('.hg/last-message.txt'), + l('global/one.txt'), + l('global/two.txt'), + l('global/files.x'), + l('global/here.tmp'), + l('f/o/f.oo'), + l('dir/graft-one'), + l('dir/dir2/graft2'), + l('dir3/ok'), + l('dir3/sub/ok.txt'), + ]) + + MANIFEST_IN = DALS("""\ + include ok + include xo + exclude xo + include foo.tmp + include buildout.cfg + global-include *.x + global-include *.txt + global-exclude *.tmp + recursive-include f *.oo + recursive-exclude global *.x + graft dir + prune dir3 + """) + + for line in MANIFEST_IN.split('\n'): + if not line: + continue + file_list.process_template_line(line) + + wanted = [ + 'buildout.cfg', + 'four.txt', + 'ok', + l('.hg/last-message.txt'), + l('dir/graft-one'), + l('dir/dir2/graft2'), + l('f/o/f.oo'), + l('global/one.txt'), + l('global/two.txt'), + ] + file_list.sort() + + assert file_list.files == wanted + + def test_exclude_pattern(self): + # return False if no match + file_list = FileList() + assert not file_list.exclude_pattern('*.py') + + # return True if files match + file_list = FileList() + file_list.files = ['a.py', 'b.py'] + assert file_list.exclude_pattern('*.py') + + # test excludes + file_list = FileList() + file_list.files = ['a.py', 'a.txt'] + file_list.exclude_pattern('*.py') + assert file_list.files == ['a.txt'] + + def test_include_pattern(self): + # return False if no match + file_list = FileList() + file_list.set_allfiles([]) + assert not file_list.include_pattern('*.py') + + # return True if files match + file_list = FileList() + file_list.set_allfiles(['a.py', 'b.txt']) + assert file_list.include_pattern('*.py') + + # test * matches all files + file_list = FileList() + assert file_list.allfiles is None + file_list.set_allfiles(['a.py', 'b.txt']) + file_list.include_pattern('*') + assert file_list.allfiles == ['a.py', 'b.txt'] + + def test_process_template(self): + l = make_local_path + # invalid lines + file_list = FileList() + for action in ('include', 'exclude', 'global-include', + 'global-exclude', 
'recursive-include', + 'recursive-exclude', 'graft', 'prune', 'blarg'): + try: + file_list.process_template_line(action) + except DistutilsTemplateError: + pass + except Exception: + assert False, "Incorrect error thrown" + else: + assert False, "Should have thrown an error" + + # include + file_list = FileList() + file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')]) + + file_list.process_template_line('include *.py') + assert file_list.files == ['a.py'] + self.assertNoWarnings() + + file_list.process_template_line('include *.rb') + assert file_list.files == ['a.py'] + self.assertWarnings() + + # exclude + file_list = FileList() + file_list.files = ['a.py', 'b.txt', l('d/c.py')] + + file_list.process_template_line('exclude *.py') + assert file_list.files == ['b.txt', l('d/c.py')] + self.assertNoWarnings() + + file_list.process_template_line('exclude *.rb') + assert file_list.files == ['b.txt', l('d/c.py')] + self.assertWarnings() + + # global-include + file_list = FileList() + file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')]) + + file_list.process_template_line('global-include *.py') + assert file_list.files == ['a.py', l('d/c.py')] + self.assertNoWarnings() + + file_list.process_template_line('global-include *.rb') + assert file_list.files == ['a.py', l('d/c.py')] + self.assertWarnings() + + # global-exclude + file_list = FileList() + file_list.files = ['a.py', 'b.txt', l('d/c.py')] + + file_list.process_template_line('global-exclude *.py') + assert file_list.files == ['b.txt'] + self.assertNoWarnings() + + file_list.process_template_line('global-exclude *.rb') + assert file_list.files == ['b.txt'] + self.assertWarnings() + + # recursive-include + file_list = FileList() + file_list.set_allfiles(['a.py', l('d/b.py'), l('d/c.txt'), + l('d/d/e.py')]) + + file_list.process_template_line('recursive-include d *.py') + assert file_list.files == [l('d/b.py'), l('d/d/e.py')] + self.assertNoWarnings() + + file_list.process_template_line('recursive-include e *.py') + assert file_list.files == [l('d/b.py'), l('d/d/e.py')] + self.assertWarnings() + + # recursive-exclude + file_list = FileList() + file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')] + + file_list.process_template_line('recursive-exclude d *.py') + assert file_list.files == ['a.py', l('d/c.txt')] + self.assertNoWarnings() + + file_list.process_template_line('recursive-exclude e *.py') + assert file_list.files == ['a.py', l('d/c.txt')] + self.assertWarnings() + + # graft + file_list = FileList() + file_list.set_allfiles(['a.py', l('d/b.py'), l('d/d/e.py'), + l('f/f.py')]) + + file_list.process_template_line('graft d') + assert file_list.files == [l('d/b.py'), l('d/d/e.py')] + self.assertNoWarnings() + + file_list.process_template_line('graft e') + assert file_list.files == [l('d/b.py'), l('d/d/e.py')] + self.assertWarnings() + + # prune + file_list = FileList() + file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')] + + file_list.process_template_line('prune d') + assert file_list.files == ['a.py', l('f/f.py')] + self.assertNoWarnings() + + file_list.process_template_line('prune e') + assert file_list.files == ['a.py', l('f/f.py')] + self.assertWarnings() diff --git a/setuptools/tests/test_msvc9compiler.py b/setuptools/tests/test_msvc.py index 09e0460c..a0c76ea0 100644 --- a/setuptools/tests/test_msvc9compiler.py +++ b/setuptools/tests/test_msvc.py @@ -1,5 +1,5 @@ """ -Tests for msvc9compiler. +Tests for msvc support module. 
""" import os @@ -69,9 +69,9 @@ class TestModulePatch: def test_patched(self): "Test the module is actually patched" mod_name = distutils.msvc9compiler.find_vcvarsall.__module__ - assert mod_name == "setuptools.msvc9_support", "find_vcvarsall unpatched" + assert mod_name == "setuptools.msvc", "find_vcvarsall unpatched" - def test_no_registry_entryies_means_nothing_found(self): + def test_no_registry_entries_means_nothing_found(self): """ No registry entries or environment variable should lead to an error directing the user to download vcpython27. @@ -83,10 +83,12 @@ class TestModulePatch: with mock_reg(): assert find_vcvarsall(9.0) is None - expected = distutils.errors.DistutilsPlatformError - with pytest.raises(expected) as exc: + try: query_vcvarsall(9.0) - assert 'aka.ms/vcpython27' in str(exc) + except Exception as exc: + expected = distutils.errors.DistutilsPlatformError + assert isinstance(exc, expected) + assert 'aka.ms/vcpython27' in str(exc) @pytest.yield_fixture def user_preferred_setting(self): diff --git a/setuptools/tests/test_packageindex.py b/setuptools/tests/test_packageindex.py index 6a76b5fc..f9bf895b 100644 --- a/setuptools/tests/test_packageindex.py +++ b/setuptools/tests/test_packageindex.py @@ -7,14 +7,24 @@ import distutils.errors from setuptools.extern import six from setuptools.extern.six.moves import urllib, http_client -from .textwrap import DALS import pkg_resources import setuptools.package_index from setuptools.tests.server import IndexServer +from .textwrap import DALS class TestPackageIndex: + def test_regex(self): + hash_url = 'http://other_url?:action=show_md5&' + hash_url += 'digest=0123456789abcdef0123456789abcdef' + doc = """ + <a href="http://some_url">Name</a> + (<a title="MD5 hash" + href="{hash_url}">md5</a>) + """.lstrip().format(**locals()) + assert setuptools.package_index.PYPI_MD5.match(doc) + def test_bad_url_bad_port(self): index = setuptools.package_index.PackageIndex() url = 'http://127.0.0.1:0/nonesuch/test_package_index' @@ -129,7 +139,7 @@ class TestPackageIndex: # the distribution has been found assert 'foobar' in pi # we have only one link, because links are compared without md5 - assert len(pi['foobar'])==1 + assert len(pi['foobar']) == 1 # the link should be from the index assert 'correct_md5' in pi['foobar'][0].location @@ -209,6 +219,7 @@ class TestContentCheckers: class TestPyPIConfig: + def test_percent_in_password(self, tmpdir, monkeypatch): monkeypatch.setitem(os.environ, 'HOME', str(tmpdir)) pypirc = tmpdir / '.pypirc' diff --git a/setuptools/tests/test_sandbox.py b/setuptools/tests/test_sandbox.py index fefd46f7..aa6138e4 100644 --- a/setuptools/tests/test_sandbox.py +++ b/setuptools/tests/test_sandbox.py @@ -57,6 +57,7 @@ class TestSandbox: class TestExceptionSaver: + def test_exception_trapped(self): with setuptools.sandbox.ExceptionSaver(): raise ValueError("details") diff --git a/setuptools/tests/test_sdist.py b/setuptools/tests/test_sdist.py index d2a1f1bb..16d0eb07 100644 --- a/setuptools/tests/test_sdist.py +++ b/setuptools/tests/test_sdist.py @@ -132,7 +132,6 @@ class TestSdistTest: assert os.path.join('sdist_test', 'b.txt') in manifest assert os.path.join('sdist_test', 'c.rst') not in manifest - def test_defaults_case_sensitivity(self): """ Make sure default files (README.*, etc.) 
are added in a case-sensitive diff --git a/setuptools/unicode_utils.py b/setuptools/unicode_utils.py index ffab3e24..7c63efd2 100644 --- a/setuptools/unicode_utils.py +++ b/setuptools/unicode_utils.py @@ -3,6 +3,7 @@ import sys from setuptools.extern import six + # HFS Plus uses decomposed UTF-8 def decompose(path): if isinstance(path, six.text_type): diff --git a/setuptools/utils.py b/setuptools/utils.py index 91e4b87f..080b9a8e 100644 --- a/setuptools/utils.py +++ b/setuptools/utils.py @@ -3,9 +3,9 @@ import os.path def cs_path_exists(fspath): - if not os.path.exists(fspath): + if not os.path.exists(fspath): return False # make absolute so we always have a directory abspath = os.path.abspath(fspath) directory, filename = os.path.split(abspath) - return filename in os.listdir(directory)
\ No newline at end of file + return filename in os.listdir(directory)
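
For context on what the new FileList tests above exercise, here is a minimal standalone sketch (not part of this patch) of how distutils' FileList consumes MANIFEST.in-style template lines. The project tree and the template lines below are hypothetical; the sketch assumes an environment where distutils.filelist is importable, and it seeds allfiles by hand, as the tests do, instead of scanning a real checkout.

from distutils.filelist import FileList

# Hypothetical project tree, POSIX-style separators.
all_files = [
    'README.txt',
    'setup.py',
    'pkg/module.py',
    'pkg/data.tmp',
    'docs/index.rst',
]

file_list = FileList()
# Seed the candidate list up front (like set_allfiles() in the tests)
# rather than letting FileList walk the real filesystem.
file_list.set_allfiles(all_files)

# A few MANIFEST.in-style commands covering the same verbs that
# test_process_template_line walks through above.
template = [
    'include README.txt',            # one file, anchored at the root
    'recursive-include docs *.rst',  # matching files under docs/
    'graft pkg',                     # everything under pkg/
    'global-exclude *.tmp',          # then drop temp files wherever they live
]

for line in template:
    file_list.process_template_line(line)

file_list.sort()
print(file_list.files)
# With the hypothetical tree above this prints:
# ['README.txt', 'docs/index.rst', 'pkg/module.py']

Seeding allfiles this way is also what keeps the tests hermetic: neither the sketch nor the tests depend on the contents of a real source tree, only on the simulated file list.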