-rw-r--r--  CHANGES.rst                             12
-rw-r--r--  pkg_resources/__init__.py               34
-rw-r--r--  pkg_resources/tests/test_resources.py   70
-rw-r--r--  setuptools/msvc.py                      29
-rw-r--r--  setuptools/tests/test_manifest.py      475
5 files changed, 562 insertions(+), 58 deletions(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index bffad4c5..9f098df9 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -2,6 +2,11 @@
CHANGES
=======
+v25.3.1
+-------
+
+* #739: Fix unquoted libpaths by restoring compatibility between `numpy.distutils` and `distutils._msvccompiler` for Numpy < 1.11.2 (fixes issue #728; the error is also fixed upstream in Numpy).
+
v25.3.0
-------
@@ -16,20 +21,21 @@ v25.2.0
v25.1.6
-------
-* #725
+* #725: Revert the `library_dir_option` patch (the error is related to `numpy.distutils` and caused failures for non-Numpy users).
v25.1.5
-------
* #720
-* #723
+* #723: Improve patch for `library_dir_option`.
v25.1.4
-------
* #717
* #713
-* #707 via #715
+* #707: Fix Python 2 compatibility for MSVC by catching errors properly.
+* #715: Fix unquoted libpaths by patching `library_dir_option`.
v25.1.3
-------
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py
index 17b69727..87455a0d 100644
--- a/pkg_resources/__init__.py
+++ b/pkg_resources/__init__.py
@@ -752,8 +752,8 @@ class WorkingSet(object):
if entry is None:
entry = dist.location
- keys = self.entry_keys.setdefault(entry,[])
- keys2 = self.entry_keys.setdefault(dist.location,[])
+ keys = self.entry_keys.setdefault(entry, [])
+ keys2 = self.entry_keys.setdefault(dist.location, [])
if not replace and dist.key in self.by_key:
# ignore hidden distros
return
@@ -1353,7 +1353,7 @@ def get_default_cache():
# best option, should be locale-safe
(('APPDATA',), None),
(('USERPROFILE',), app_data),
- (('HOMEDRIVE','HOMEPATH'), app_data),
+ (('HOMEDRIVE', 'HOMEPATH'), app_data),
(('HOMEPATH',), app_data),
(('HOME',), None),
# 95/98/ME
@@ -1392,7 +1392,7 @@ def safe_version(version):
# normalize the version
return str(packaging.version.Version(version))
except packaging.version.InvalidVersion:
- version = version.replace(' ','.')
+ version = version.replace(' ', '.')
return re.sub('[^A-Za-z0-9.]+', '-', version)
@@ -1410,7 +1410,7 @@ def to_filename(name):
Any '-' characters are currently replaced with '_'.
"""
- return name.replace('-','_')
+ return name.replace('-', '_')
def invalid_marker(text):
@@ -1508,7 +1508,7 @@ class NullProvider:
cache[script_filename] = (
len(script_text), 0, script_text.split('\n'), script_filename
)
- script_code = compile(script_text, script_filename,'exec')
+ script_code = compile(script_text, script_filename, 'exec')
exec(script_code, namespace, namespace)
def _has(self, path):
@@ -1965,7 +1965,7 @@ def find_on_path(importer, path_item, only=False):
if _is_unpacked_egg(path_item):
yield Distribution.from_filename(
path_item, metadata=PathMetadata(
- path_item, os.path.join(path_item,'EGG-INFO')
+ path_item, os.path.join(path_item, 'EGG-INFO')
)
)
else:
@@ -2037,7 +2037,7 @@ def _handle_ns(packageName, path_item):
module = sys.modules[packageName] = types.ModuleType(packageName)
module.__path__ = []
_set_parent_ns(packageName)
- elif not hasattr(module,'__path__'):
+ elif not hasattr(module, '__path__'):
raise TypeError("Not a package:", packageName)
handler = _find_adapter(_namespace_handlers, importer)
subpath = handler(importer, path_item, packageName, module)
@@ -2089,8 +2089,8 @@ def declare_namespace(packageName):
# Track what packages are namespaces, so when new path items are added,
# they can be updated
- _namespace_packages.setdefault(parent,[]).append(packageName)
- _namespace_packages.setdefault(packageName,[])
+ _namespace_packages.setdefault(parent, []).append(packageName)
+ _namespace_packages.setdefault(packageName, [])
for path_item in path:
# Ensure all the parent's path items are reflected in the child,
@@ -2104,7 +2104,7 @@ def fixup_namespace_packages(path_item, parent=None):
"""Ensure that previously-declared namespace packages include path_item"""
_imp.acquire_lock()
try:
- for package in _namespace_packages.get(parent,()):
+ for package in _namespace_packages.get(parent, ()):
subpath = _handle_ns(package, path_item)
if subpath:
fixup_namespace_packages(subpath, package)
@@ -2482,7 +2482,7 @@ class Distribution(object):
elif not evaluate_marker(marker):
reqs = []
extra = safe_extra(extra) or None
- dm.setdefault(extra,[]).extend(parse_requirements(reqs))
+ dm.setdefault(extra, []).extend(parse_requirements(reqs))
return dm
def requires(self, extras=()):
@@ -2578,7 +2578,7 @@ class Distribution(object):
self._get_metadata('entry_points.txt'), self
)
if group is not None:
- return ep_map.get(group,{})
+ return ep_map.get(group, {})
return ep_map
def get_entry_info(self, group, name):
@@ -2682,7 +2682,7 @@ class Distribution(object):
return False
return True
- def clone(self,**kw):
+ def clone(self, **kw):
"""Copy this distribution, substituting in any changed keyword args"""
names = 'project_name version py_version platform location precedence'
for attr in names.split():
@@ -2769,7 +2769,7 @@ _distributionImpl = {
}
-def issue_warning(*args,**kw):
+def issue_warning(*args, **kw):
level = 1
g = globals()
try:
@@ -2916,12 +2916,12 @@ def split_sections(s):
# wrap up last segment
yield section, content
-def _mkstemp(*args,**kw):
+def _mkstemp(*args, **kw):
old_open = os.open
try:
# temporarily bypass sandboxing
os.open = os_open
- return tempfile.mkstemp(*args,**kw)
+ return tempfile.mkstemp(*args, **kw)
finally:
# and then put it back
os.open = old_open
diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py
index 4e7652e3..1d663b83 100644
--- a/pkg_resources/tests/test_resources.py
+++ b/pkg_resources/tests/test_resources.py
@@ -51,15 +51,15 @@ class TestDistro:
assert list(ad) == ['foopkg']
# Distributions sort by version
- assert [dist.version for dist in ad['FooPkg']] == ['1.4','1.3-1','1.2']
+ assert [dist.version for dist in ad['FooPkg']] == ['1.4', '1.3-1', '1.2']
# Removing a distribution leaves sequence alone
ad.remove(ad['FooPkg'][1])
- assert [dist.version for dist in ad['FooPkg']] == ['1.4','1.2']
+ assert [dist.version for dist in ad['FooPkg']] == ['1.4', '1.2']
# And inserting adds them in order
ad.add(dist_from_fn("FooPkg-1.9.egg"))
- assert [dist.version for dist in ad['FooPkg']] == ['1.9','1.4','1.2']
+ assert [dist.version for dist in ad['FooPkg']] == ['1.9', '1.4', '1.2']
ws = WorkingSet([])
foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg")
@@ -86,7 +86,7 @@ class TestDistro:
ws.add(foo14)
assert ad.best_match(req, ws).version == '1.4'
- def checkFooPkg(self,d):
+ def checkFooPkg(self, d):
assert d.project_name == "FooPkg"
assert d.key == "foopkg"
assert d.version == "1.3.post1"
@@ -97,7 +97,7 @@ class TestDistro:
def testDistroBasics(self):
d = Distribution(
"/some/path",
- project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
+ project_name="FooPkg", version="1.3-1", py_version="2.4", platform="win32"
)
self.checkFooPkg(d)
@@ -115,7 +115,7 @@ class TestDistro:
d = Distribution(
"/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
metadata=Metadata(
- ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
+ ('PKG-INFO', "Metadata-Version: 1.0\nVersion: 1.3-1\n")
)
)
self.checkFooPkg(d)
@@ -164,7 +164,7 @@ class TestDistro:
ad.add(Baz)
# Activation list now includes resolved dependency
- assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) == [Foo,Baz]
+ assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) == [Foo, Baz]
# Requests for conflicting versions produce VersionConflict
with pytest.raises(VersionConflict) as vc:
ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad)
@@ -218,7 +218,7 @@ class TestDistro:
quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
ad.add(quux)
res = list(ws.resolve(parse_requirements("Foo[baz]"), ad))
- assert res == [Foo,quux]
+ assert res == [Foo, quux]
def test_marker_evaluation_with_multiple_extras(self):
ad = pkg_resources.Environment([])
@@ -238,7 +238,7 @@ class TestDistro:
fred = Distribution.from_filename("/foo_dir/fred-0.1.dist-info")
ad.add(fred)
res = list(ws.resolve(parse_requirements("Foo[baz,bar]"), ad))
- assert sorted(res) == [fred,quux,Foo]
+ assert sorted(res) == [fred, quux, Foo]
def test_marker_evaluation_with_extras_loop(self):
ad = pkg_resources.Environment([])
@@ -274,19 +274,19 @@ class TestDistro:
docutils>=0.3
[fastcgi]
fcgiapp>=0.1""")
- self.checkRequires(d,"Twisted>=1.5")
+ self.checkRequires(d, "Twisted>=1.5")
self.checkRequires(
- d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
+ d, "Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
)
self.checkRequires(
- d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
+ d, "Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
)
self.checkRequires(
- d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
- ["docgen","fastcgi"]
+ d, "Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
+ ["docgen", "fastcgi"]
)
self.checkRequires(
- d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
+ d, "Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
["fastcgi", "docgen"]
)
with pytest.raises(pkg_resources.UnknownExtra):
@@ -348,7 +348,7 @@ class TestEntryPoints:
def setup_method(self, method):
self.dist = Distribution.from_filename(
- "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
+ "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt', '[x]')))
def testBasics(self):
ep = EntryPoint(
@@ -405,7 +405,7 @@ class TestEntryPoints:
submap_expect = dict(
feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
- feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']),
+ feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1', 'extra2']),
feature3=EntryPoint('feature3', 'this.module', extras=['something'])
)
submap_str = """
@@ -423,7 +423,7 @@ class TestEntryPoints:
EntryPoint.parse_group("x", ["foo=baz", "foo=bar"])
def testParseMap(self):
- m = EntryPoint.parse_map({'xyz':self.submap_str})
+ m = EntryPoint.parse_map({'xyz': self.submap_str})
self.checkSubMap(m['xyz'])
assert list(m.keys()) == ['xyz']
m = EntryPoint.parse_map("[xyz]\n" + self.submap_str)
@@ -480,7 +480,7 @@ class TestRequirements:
hash((
"twisted",
packaging.specifiers.SpecifierSet(">=1.2"),
- frozenset(["foo","bar"]),
+ frozenset(["foo", "bar"]),
None
))
)
@@ -521,9 +521,9 @@ class TestParsing:
assert list(parse_requirements('')) == []
def testYielding(self):
- for inp,out in [
- ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
- (['x\n\n','y'], ['x','y']),
+ for inp, out in [
+ ([], []), ('x', ['x']), ([[]], []), (' x\n y', ['x', 'y']),
+ (['x\n\n', 'y'], ['x', 'y']),
]:
assert list(pkg_resources.yield_lines(inp)) == out
@@ -626,9 +626,9 @@ class TestParsing:
req, = parse_requirements('foo >= 1.0, < 3')
def testVersionEquality(self):
- def c(s1,s2):
- p1, p2 = parse_version(s1),parse_version(s2)
- assert p1 == p2, (s1,s2,p1,p2)
+ def c(s1, s2):
+ p1, p2 = parse_version(s1), parse_version(s2)
+ assert p1 == p2, (s1, s2, p1, p2)
c('1.2-rc1', '1.2rc1')
c('0.4', '0.4.0')
@@ -642,13 +642,13 @@ class TestParsing:
c('1.2.a', '1.2a')
def testVersionOrdering(self):
- def c(s1,s2):
- p1, p2 = parse_version(s1),parse_version(s2)
- assert p1 < p2, (s1,s2,p1,p2)
+ def c(s1, s2):
+ p1, p2 = parse_version(s1), parse_version(s2)
+ assert p1 < p2, (s1, s2, p1, p2)
- c('2.1','2.1.1')
- c('2a1','2b0')
- c('2a1','2.1')
+ c('2.1', '2.1.1')
+ c('2a1', '2b0')
+ c('2a1', '2.1')
c('2.3a1', '2.3')
c('2.1-1', '2.1-2')
c('2.1-1', '2.1.1')
@@ -660,8 +660,8 @@ class TestParsing:
c('0.4', '4.0')
c('0.0.4', '0.4.0')
c('0post1', '0.4post1')
- c('2.1.0-rc1','2.1.0')
- c('2.1dev','2.1a0')
+ c('2.1.0-rc1', '2.1.0')
+ c('2.1dev', '2.1a0')
torture = """
0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
@@ -669,9 +669,9 @@ class TestParsing:
0.77.2-1 0.77.1-1 0.77.0-1
""".split()
- for p,v1 in enumerate(torture):
+ for p, v1 in enumerate(torture):
for v2 in torture[p + 1:]:
- c(v2,v1)
+ c(v2, v1)
def testVersionBuildout(self):
"""
diff --git a/setuptools/msvc.py b/setuptools/msvc.py
index 4616d4be..bffaa6aa 100644
--- a/setuptools/msvc.py
+++ b/setuptools/msvc.py
@@ -2,9 +2,11 @@
This module adds improved support for Microsoft Visual C++ compilers.
"""
import os
+import sys
import platform
import itertools
import distutils.errors
+from distutils.version import StrictVersion
from setuptools.extern.six.moves import filterfalse
@@ -75,14 +77,21 @@ def patch_for_specialized_compiler():
msvc9compiler.find_vcvarsall = msvc9_find_vcvarsall
unpatched['msvc9_query_vcvarsall'] = msvc9compiler.query_vcvarsall
msvc9compiler.query_vcvarsall = msvc9_query_vcvarsall
- except Exception:
+ except NameError:
pass
try:
# Patch distutils._msvccompiler._get_vc_env
unpatched['msvc14_get_vc_env'] = msvc14compiler._get_vc_env
msvc14compiler._get_vc_env = msvc14_get_vc_env
- except Exception:
+ except NameError:
+ pass
+
+ try:
+ # Patch distutils._msvccompiler.gen_lib_options for Numpy
+ unpatched['msvc14_gen_lib_options'] = msvc14compiler.gen_lib_options
+ msvc14compiler.gen_lib_options = msvc14_gen_lib_options
+ except NameError:
pass
@@ -212,6 +221,19 @@ def msvc14_get_vc_env(plat_spec):
raise
+def msvc14_gen_lib_options(*args, **kwargs):
+    """
+    Patched "distutils._msvccompiler.gen_lib_options" to fix compatibility
+    between "numpy.distutils" and "distutils._msvccompiler" (for Numpy < 1.11.2).
+    """
+ if "numpy.distutils" in sys.modules:
+ import numpy as np
+ if StrictVersion(np.__version__) < StrictVersion('1.11.2'):
+ return np.distutils.ccompiler.gen_lib_options(*args, **kwargs)
+ return unpatched['msvc14_gen_lib_options'](*args, **kwargs)
+
+
def _augment_exception(exc, version, arch=''):
"""
Add details to the exception message to help guide the user
@@ -243,7 +265,8 @@ def _augment_exception(exc, version, arch=''):
elif version >= 14.0:
# For VC++ 14.0 Redirect user to Visual C++ Build Tools
message += (' Get it with "Microsoft Visual C++ Build Tools": '
- r'http://landinghub.visualstudio.com/visual-cpp-build-tools')
+ r'http://landinghub.visualstudio.com/'
+ 'visual-cpp-build-tools')
exc.args = (message, )
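
A minimal usage sketch of the patch above (hedged: Windows-only, since `distutils._msvccompiler` requires winreg; the assertion simply restates the attribute swap performed by `patch_for_specialized_compiler`, and the Numpy fallback path is only reachable when an old `numpy.distutils` has already been imported):

    import sys

    import distutils._msvccompiler as msvc14compiler
    from setuptools import msvc

    # Install the patched MSVC helpers; unavailable pieces are skipped.
    msvc.patch_for_specialized_compiler()

    # gen_lib_options now points at the wrapper added in this commit.
    assert msvc14compiler.gen_lib_options is msvc.msvc14_gen_lib_options

    # The numpy.distutils fallback is used only for Numpy < 1.11.2 that has
    # already been imported; otherwise the stock implementation is called.
    print("numpy.distutils loaded:", "numpy.distutils" in sys.modules)
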
diff --git a/setuptools/tests/test_manifest.py b/setuptools/tests/test_manifest.py
new file mode 100644
index 00000000..6e67ca61
--- /dev/null
+++ b/setuptools/tests/test_manifest.py
@@ -0,0 +1,475 @@
+# -*- coding: utf-8 -*-
+"""sdist tests"""
+
+import contextlib
+import os
+import shutil
+import sys
+import tempfile
+from distutils import log
+from distutils.errors import DistutilsTemplateError
+
+from setuptools.command.egg_info import FileList, egg_info
+from setuptools.dist import Distribution
+from setuptools.extern import six
+from setuptools.tests.textwrap import DALS
+
+import pytest
+
+py3_only = pytest.mark.xfail(six.PY2, reason="Test runs on Python 3 only")
+
+
+def make_local_path(s):
+ """Converts '/' in a string to os.sep"""
+ return s.replace('/', os.sep)
+
+
+SETUP_ATTRS = {
+ 'name': 'app',
+ 'version': '0.0',
+ 'packages': ['app'],
+}
+
+
+SETUP_PY = """\
+from setuptools import setup
+
+setup(**%r)
+""" % SETUP_ATTRS
+
+
+@contextlib.contextmanager
+def quiet():
+ old_stdout, old_stderr = sys.stdout, sys.stderr
+ sys.stdout, sys.stderr = six.StringIO(), six.StringIO()
+ try:
+ yield
+ finally:
+ sys.stdout, sys.stderr = old_stdout, old_stderr
+
+
+def touch(filename):
+ open(filename, 'w').close()
+
+# The set of files always in the manifest, including all files in the
+# .egg-info directory
+default_files = frozenset(map(make_local_path, [
+ 'README.rst',
+ 'MANIFEST.in',
+ 'setup.py',
+ 'app.egg-info/PKG-INFO',
+ 'app.egg-info/SOURCES.txt',
+ 'app.egg-info/dependency_links.txt',
+ 'app.egg-info/top_level.txt',
+ 'app/__init__.py',
+]))
+
+
+class TempDirTestCase(object):
+
+ def setup_method(self, method):
+ self.temp_dir = tempfile.mkdtemp()
+ self.old_cwd = os.getcwd()
+ os.chdir(self.temp_dir)
+
+ def teardown_method(self, method):
+ os.chdir(self.old_cwd)
+ shutil.rmtree(self.temp_dir)
+
+
+class TestManifestTest(TempDirTestCase):
+
+ def setup_method(self, method):
+ super(TestManifestTest, self).setup_method(method)
+
+ f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
+ f.write(SETUP_PY)
+ f.close()
+
+ """
+ Create a file tree like:
+ - LICENSE
+ - README.rst
+ - testing.rst
+ - .hidden.rst
+ - app/
+ - __init__.py
+ - a.txt
+ - b.txt
+ - c.rst
+ - static/
+ - app.js
+ - app.js.map
+ - app.css
+ - app.css.map
+ """
+
+ for fname in ['README.rst', '.hidden.rst', 'testing.rst', 'LICENSE']:
+ touch(os.path.join(self.temp_dir, fname))
+
+ # Set up the rest of the test package
+ test_pkg = os.path.join(self.temp_dir, 'app')
+ os.mkdir(test_pkg)
+ for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
+ touch(os.path.join(test_pkg, fname))
+
+ # Some compiled front-end assets to include
+ static = os.path.join(test_pkg, 'static')
+ os.mkdir(static)
+ for fname in ['app.js', 'app.js.map', 'app.css', 'app.css.map']:
+ touch(os.path.join(static, fname))
+
+ def make_manifest(self, contents):
+ """Write a MANIFEST.in."""
+ with open(os.path.join(self.temp_dir, 'MANIFEST.in'), 'w') as f:
+ f.write(DALS(contents))
+
+ def get_files(self):
+ """Run egg_info and get all the files to include, as a set"""
+ dist = Distribution(SETUP_ATTRS)
+ dist.script_name = 'setup.py'
+ cmd = egg_info(dist)
+ cmd.ensure_finalized()
+
+ cmd.run()
+
+ return set(cmd.filelist.files)
+
+ def test_no_manifest(self):
+ """Check a missing MANIFEST.in includes only the standard files."""
+ assert (default_files - set(['MANIFEST.in'])) == self.get_files()
+
+ def test_empty_files(self):
+ """Check an empty MANIFEST.in includes only the standard files."""
+ self.make_manifest("")
+ assert default_files == self.get_files()
+
+ def test_include(self):
+ """Include extra rst files in the project root."""
+ self.make_manifest("include *.rst")
+ files = default_files | set([
+ 'testing.rst', '.hidden.rst'])
+ assert files == self.get_files()
+
+ def test_exclude(self):
+ """Include everything in app/ except the text files"""
+ l = make_local_path
+ self.make_manifest(
+ """
+ include app/*
+ exclude app/*.txt
+ """)
+ files = default_files | set([l('app/c.rst')])
+ assert files == self.get_files()
+
+ def test_include_multiple(self):
+ """Include with multiple patterns."""
+ l = make_local_path
+ self.make_manifest("include app/*.txt app/static/*")
+ files = default_files | set([
+ l('app/a.txt'), l('app/b.txt'),
+ l('app/static/app.js'), l('app/static/app.js.map'),
+ l('app/static/app.css'), l('app/static/app.css.map')])
+ assert files == self.get_files()
+
+ def test_graft(self):
+ """Include the whole app/static/ directory."""
+ l = make_local_path
+ self.make_manifest("graft app/static")
+ files = default_files | set([
+ l('app/static/app.js'), l('app/static/app.js.map'),
+ l('app/static/app.css'), l('app/static/app.css.map')])
+ assert files == self.get_files()
+
+ def test_graft_global_exclude(self):
+ """Exclude all *.map files in the project."""
+ l = make_local_path
+ self.make_manifest(
+ """
+ graft app/static
+ global-exclude *.map
+ """)
+ files = default_files | set([
+ l('app/static/app.js'), l('app/static/app.css')])
+ assert files == self.get_files()
+
+ def test_global_include(self):
+ """Include all *.rst, *.js, and *.css files in the whole tree."""
+ l = make_local_path
+ self.make_manifest(
+ """
+ global-include *.rst *.js *.css
+ """)
+ files = default_files | set([
+ '.hidden.rst', 'testing.rst', l('app/c.rst'),
+ l('app/static/app.js'), l('app/static/app.css')])
+ assert files == self.get_files()
+
+ def test_graft_prune(self):
+ """Include all files in app/, except for the whole app/static/ dir."""
+ l = make_local_path
+ self.make_manifest(
+ """
+ graft app
+ prune app/static
+ """)
+ files = default_files | set([
+ l('app/a.txt'), l('app/b.txt'), l('app/c.rst')])
+ assert files == self.get_files()
+
+
+class TestFileListTest(TempDirTestCase):
+ """
+ A copy of the relevant bits of distutils/tests/test_filelist.py,
+ to ensure setuptools' version of FileList keeps parity with distutils.
+ """
+
+ def setup_method(self, method):
+ super(TestFileListTest, self).setup_method(method)
+ self.threshold = log.set_threshold(log.FATAL)
+ self._old_log = log.Log._log
+ log.Log._log = self._log
+ self.logs = []
+
+ def teardown_method(self, method):
+ log.set_threshold(self.threshold)
+ log.Log._log = self._old_log
+ super(TestFileListTest, self).teardown_method(method)
+
+ def _log(self, level, msg, args):
+ if level not in (log.DEBUG, log.INFO, log.WARN, log.ERROR, log.FATAL):
+ raise ValueError('%s wrong log level' % str(level))
+ self.logs.append((level, msg, args))
+
+ def get_logs(self, *levels):
+ def _format(msg, args):
+ if len(args) == 0:
+ return msg
+ return msg % args
+ return [_format(msg, args) for level, msg, args
+ in self.logs if level in levels]
+
+ def clear_logs(self):
+ self.logs = []
+
+ def assertNoWarnings(self):
+ assert self.get_logs(log.WARN) == []
+ self.clear_logs()
+
+ def assertWarnings(self):
+ assert len(self.get_logs(log.WARN)) > 0
+ self.clear_logs()
+
+ def make_files(self, files):
+ for file in files:
+ file = os.path.join(self.temp_dir, file)
+ dirname, basename = os.path.split(file)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+ open(file, 'w').close()
+
+ def test_process_template_line(self):
+ # testing all MANIFEST.in template patterns
+ file_list = FileList()
+ l = make_local_path
+
+ # simulated file list
+ self.make_files([
+ 'foo.tmp', 'ok', 'xo', 'four.txt',
+ 'buildout.cfg',
+ # filelist does not filter out VCS directories,
+ # it's sdist that does
+ l('.hg/last-message.txt'),
+ l('global/one.txt'),
+ l('global/two.txt'),
+ l('global/files.x'),
+ l('global/here.tmp'),
+ l('f/o/f.oo'),
+ l('dir/graft-one'),
+ l('dir/dir2/graft2'),
+ l('dir3/ok'),
+ l('dir3/sub/ok.txt'),
+ ])
+
+ MANIFEST_IN = DALS("""\
+ include ok
+ include xo
+ exclude xo
+ include foo.tmp
+ include buildout.cfg
+ global-include *.x
+ global-include *.txt
+ global-exclude *.tmp
+ recursive-include f *.oo
+ recursive-exclude global *.x
+ graft dir
+ prune dir3
+ """)
+
+ for line in MANIFEST_IN.split('\n'):
+ if not line:
+ continue
+ file_list.process_template_line(line)
+
+ wanted = [
+ 'buildout.cfg',
+ 'four.txt',
+ 'ok',
+ l('.hg/last-message.txt'),
+ l('dir/graft-one'),
+ l('dir/dir2/graft2'),
+ l('f/o/f.oo'),
+ l('global/one.txt'),
+ l('global/two.txt'),
+ ]
+ file_list.sort()
+
+ assert file_list.files == wanted
+
+ def test_exclude_pattern(self):
+ # return False if no match
+ file_list = FileList()
+ assert not file_list.exclude_pattern('*.py')
+
+ # return True if files match
+ file_list = FileList()
+ file_list.files = ['a.py', 'b.py']
+ assert file_list.exclude_pattern('*.py')
+
+ # test excludes
+ file_list = FileList()
+ file_list.files = ['a.py', 'a.txt']
+ file_list.exclude_pattern('*.py')
+ assert file_list.files == ['a.txt']
+
+ def test_include_pattern(self):
+ # return False if no match
+ file_list = FileList()
+ file_list.set_allfiles([])
+ assert not file_list.include_pattern('*.py')
+
+ # return True if files match
+ file_list = FileList()
+ file_list.set_allfiles(['a.py', 'b.txt'])
+ assert file_list.include_pattern('*.py')
+
+ # test * matches all files
+ file_list = FileList()
+ assert file_list.allfiles is None
+ file_list.set_allfiles(['a.py', 'b.txt'])
+ file_list.include_pattern('*')
+ assert file_list.allfiles == ['a.py', 'b.txt']
+
+ def test_process_template(self):
+ l = make_local_path
+ # invalid lines
+ file_list = FileList()
+ for action in ('include', 'exclude', 'global-include',
+ 'global-exclude', 'recursive-include',
+ 'recursive-exclude', 'graft', 'prune', 'blarg'):
+ try:
+ file_list.process_template_line(action)
+ except DistutilsTemplateError:
+ pass
+ except Exception:
+ assert False, "Incorrect error thrown"
+ else:
+ assert False, "Should have thrown an error"
+
+ # include
+ file_list = FileList()
+ file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')])
+
+ file_list.process_template_line('include *.py')
+ assert file_list.files == ['a.py']
+ self.assertNoWarnings()
+
+ file_list.process_template_line('include *.rb')
+ assert file_list.files == ['a.py']
+ self.assertWarnings()
+
+ # exclude
+ file_list = FileList()
+ file_list.files = ['a.py', 'b.txt', l('d/c.py')]
+
+ file_list.process_template_line('exclude *.py')
+ assert file_list.files == ['b.txt', l('d/c.py')]
+ self.assertNoWarnings()
+
+ file_list.process_template_line('exclude *.rb')
+ assert file_list.files == ['b.txt', l('d/c.py')]
+ self.assertWarnings()
+
+ # global-include
+ file_list = FileList()
+ file_list.set_allfiles(['a.py', 'b.txt', l('d/c.py')])
+
+ file_list.process_template_line('global-include *.py')
+ assert file_list.files == ['a.py', l('d/c.py')]
+ self.assertNoWarnings()
+
+ file_list.process_template_line('global-include *.rb')
+ assert file_list.files == ['a.py', l('d/c.py')]
+ self.assertWarnings()
+
+ # global-exclude
+ file_list = FileList()
+ file_list.files = ['a.py', 'b.txt', l('d/c.py')]
+
+ file_list.process_template_line('global-exclude *.py')
+ assert file_list.files == ['b.txt']
+ self.assertNoWarnings()
+
+ file_list.process_template_line('global-exclude *.rb')
+ assert file_list.files == ['b.txt']
+ self.assertWarnings()
+
+ # recursive-include
+ file_list = FileList()
+ file_list.set_allfiles(['a.py', l('d/b.py'), l('d/c.txt'),
+ l('d/d/e.py')])
+
+ file_list.process_template_line('recursive-include d *.py')
+ assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ self.assertNoWarnings()
+
+ file_list.process_template_line('recursive-include e *.py')
+ assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ self.assertWarnings()
+
+ # recursive-exclude
+ file_list = FileList()
+ file_list.files = ['a.py', l('d/b.py'), l('d/c.txt'), l('d/d/e.py')]
+
+ file_list.process_template_line('recursive-exclude d *.py')
+ assert file_list.files == ['a.py', l('d/c.txt')]
+ self.assertNoWarnings()
+
+ file_list.process_template_line('recursive-exclude e *.py')
+ assert file_list.files == ['a.py', l('d/c.txt')]
+ self.assertWarnings()
+
+ # graft
+ file_list = FileList()
+ file_list.set_allfiles(['a.py', l('d/b.py'), l('d/d/e.py'),
+ l('f/f.py')])
+
+ file_list.process_template_line('graft d')
+ assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ self.assertNoWarnings()
+
+ file_list.process_template_line('graft e')
+ assert file_list.files == [l('d/b.py'), l('d/d/e.py')]
+ self.assertWarnings()
+
+ # prune
+ file_list = FileList()
+ file_list.files = ['a.py', l('d/b.py'), l('d/d/e.py'), l('f/f.py')]
+
+ file_list.process_template_line('prune d')
+ assert file_list.files == ['a.py', l('f/f.py')]
+ self.assertNoWarnings()
+
+ file_list.process_template_line('prune e')
+ assert file_list.files == ['a.py', l('f/f.py')]
+ self.assertWarnings()
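
For reference, the FileList calls exercised by TestFileListTest can also be driven directly; a minimal sketch mirroring the include/global-include cases above (paths use '/' for brevity, whereas the tests go through make_local_path for portability):

    from setuptools.command.egg_info import FileList

    # 'include' only matches files at the top level of the tree...
    file_list = FileList()
    file_list.set_allfiles(['a.py', 'b.txt', 'd/c.py'])
    file_list.process_template_line('include *.py')
    assert file_list.files == ['a.py']

    # ...while 'global-include' matches anywhere in the tree.
    file_list = FileList()
    file_list.set_allfiles(['a.py', 'b.txt', 'd/c.py'])
    file_list.process_template_line('global-include *.py')
    assert file_list.files == ['a.py', 'd/c.py']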