-rw-r--r--  .appveyor.yml                                |  4
-rw-r--r--  .travis.yml                                  |  3
-rw-r--r--  azure-pipelines.yml                          |  6
-rw-r--r--  numpy/core/overrides.py                      | 37
-rw-r--r--  numpy/core/shape_base.py                     |  9
-rw-r--r--  numpy/core/tests/test_datetime.py            | 19
-rw-r--r--  numpy/core/tests/test_multiarray.py          | 32
-rw-r--r--  numpy/core/tests/test_numerictypes.py        | 15
-rw-r--r--  numpy/core/tests/test_overrides.py           | 33
-rw-r--r--  numpy/core/tests/test_records.py             | 23
-rw-r--r--  numpy/core/tests/test_regression.py          | 44
-rw-r--r--  numpy/core/tests/test_shape_base.py          | 12
-rw-r--r--  numpy/distutils/misc_util.py                 |  5
-rw-r--r--  numpy/matrixlib/tests/test_masked_matrix.py  |  9
-rwxr-xr-x  tools/travis-before-install.sh               |  5
-rwxr-xr-x  tools/travis-test.sh                         |  9
16 files changed, 205 insertions(+), 60 deletions(-)
diff --git a/.appveyor.yml b/.appveyor.yml
index f5696e950..01440c6a0 100644
--- a/.appveyor.yml
+++ b/.appveyor.yml
@@ -39,11 +39,13 @@ environment:
PYTHON_VERSION: 3.6
PYTHON_ARCH: 64
TEST_MODE: full
+ INSTALL_PICKLE5: 1
- PYTHON: C:\Python37-x64
PYTHON_VERSION: 3.7
PYTHON_ARCH: 64
TEST_MODE: full
+ INSTALL_PICKLE5: 1
init:
- "ECHO %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH%"
@@ -101,6 +103,8 @@ install:
# Upgrade to the latest pip.
- 'python -m pip install -U pip setuptools wheel'
+ - if [%INSTALL_PICKLE5%]==[1] echo pickle5 >> tools/ci/appveyor/requirements.txt
+
# Install the numpy test dependencies.
- 'pip install -U --timeout 5 --retries 2 -r tools/ci/appveyor/requirements.txt'
diff --git a/.travis.yml b/.travis.yml
index 7c8ec2ee0..cf67134d3 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -39,6 +39,7 @@ matrix:
- python: 3.7
dist: xenial # Required for Python 3.7
sudo: true # travis-ci/travis-ci#9069
+ env: INSTALL_PICKLE5=1
- python: 3.6
env: USE_CHROOT=1 ARCH=i386 DIST=bionic PYTHON=3.6
sudo: true
@@ -61,7 +62,7 @@ matrix:
- python3-dev
- python3-setuptools
- python: 3.6
- env: USE_WHEEL=1 RUN_FULL_TESTS=1 RUN_COVERAGE=1
+ env: USE_WHEEL=1 RUN_FULL_TESTS=1 RUN_COVERAGE=1 INSTALL_PICKLE5=1
- python: 2.7
env: USE_WHEEL=1 RUN_FULL_TESTS=1 PYTHON_OPTS="-3 -OO"
- python: 3.6
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index c99b56e37..14a83b70b 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -30,7 +30,7 @@ jobs:
displayName: 'make gfortran available on mac os vm'
- script: python -m pip install --upgrade pip setuptools wheel
displayName: 'Install tools'
- - script: python -m pip install cython nose pytz pytest
+ - script: python -m pip install cython nose pytz pytest pickle5
displayName: 'Install dependencies; some are optional to avoid test skips'
# NOTE: init_dgelsd failed init issue with current ACCELERATE /
# LAPACK configuration on Azure macos image; at the time of writing
@@ -80,10 +80,12 @@ jobs:
PYTHON_VERSION: '3.6'
PYTHON_ARCH: 'x64'
TEST_MODE: full
+ INSTALL_PICKLE5: 1
Python37-64bit-full:
PYTHON_VERSION: '3.7'
PYTHON_ARCH: 'x64'
TEST_MODE: full
+ INSTALL_PICKLE5: 1
steps:
- task: UsePythonVersion@0
inputs:
@@ -104,6 +106,8 @@ jobs:
displayName: 'Install dependencies; some are optional to avoid test skips'
# NOTE: for Windows builds it seems much more tractable to use runtests.py
# vs. manual setup.py and then runtests.py for testing only
+ - script: if [%INSTALL_PICKLE5%]==[1] python -m pip install pickle5
+ displayName: 'Install optional pickle5 backport (only for python3.6 and 3.7)'
- script: python runtests.py --show-build-log --mode=$(TEST_MODE) -- -rsx --junitxml=junit/test-results.xml
displayName: 'Build NumPy & Run Full NumPy Test Suite'
- task: PublishTestResults@2
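
The INSTALL_PICKLE5 flag pulls in the pickle5 backport on Python 3.6/3.7, which provides the protocol-5 pickling that ships natively with Python 3.8, so the new protocol loops in the tests below also get exercised on those interpreters. The idiom used throughout the rest of this commit is a plain roundtrip over every supported protocol; a minimal sketch (not part of the diff):

    import pickle

    import numpy as np

    a = np.arange(10)
    for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
        # Roundtrip the array through every protocol numpy supports.
        b = pickle.loads(pickle.dumps(a, protocol=proto))
        assert np.array_equal(a, b)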
diff --git a/numpy/core/overrides.py b/numpy/core/overrides.py
index 17e3d475f..906292613 100644
--- a/numpy/core/overrides.py
+++ b/numpy/core/overrides.py
@@ -2,8 +2,11 @@
TODO: rewrite this in C for performance.
"""
+import collections
import functools
+
from numpy.core.multiarray import ndarray
+from numpy.compat._inspect import getargspec
_NDARRAY_ARRAY_FUNCTION = ndarray.__array_function__
@@ -107,13 +110,45 @@ def array_function_implementation_or_override(
.format(public_api, list(map(type, overloaded_args))))
-def array_function_dispatch(dispatcher):
+ArgSpec = collections.namedtuple('ArgSpec', 'args varargs keywords defaults')
+
+
+def verify_matching_signatures(implementation, dispatcher):
+ """Verify that a dispatcher function has the right signature."""
+ implementation_spec = ArgSpec(*getargspec(implementation))
+ dispatcher_spec = ArgSpec(*getargspec(dispatcher))
+
+ if (implementation_spec.args != dispatcher_spec.args or
+ implementation_spec.varargs != dispatcher_spec.varargs or
+ implementation_spec.keywords != dispatcher_spec.keywords or
+ (bool(implementation_spec.defaults) !=
+ bool(dispatcher_spec.defaults)) or
+ (implementation_spec.defaults is not None and
+ len(implementation_spec.defaults) !=
+ len(dispatcher_spec.defaults))):
+ raise RuntimeError('implementation and dispatcher for %s have '
+ 'different function signatures' % implementation)
+
+ if implementation_spec.defaults is not None:
+ if dispatcher_spec.defaults != (None,) * len(dispatcher_spec.defaults):
+ raise RuntimeError('dispatcher functions can only use None for '
+ 'default argument values')
+
+
+def array_function_dispatch(dispatcher, verify=True):
"""Decorator for adding dispatch with the __array_function__ protocol."""
def decorator(implementation):
+ # TODO: only do this check when the appropriate flag is enabled or for
+ # a dev install. We want this check for testing but don't want to
+ # slow down all numpy imports.
+ if verify:
+ verify_matching_signatures(implementation, dispatcher)
+
@functools.wraps(implementation)
def public_api(*args, **kwargs):
relevant_args = dispatcher(*args, **kwargs)
return array_function_implementation_or_override(
implementation, public_api, relevant_args, args, kwargs)
return public_api
+
return decorator
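
The convention verify_matching_signatures enforces: a dispatcher mirrors its implementation's parameter list exactly, but spells every default as None and returns only the arguments that can carry an __array_function__ override. A hedged sketch of a conforming pair (illustrative names, assuming this branch's numpy.core.overrides is importable):

    import numpy as np
    from numpy.core.overrides import array_function_dispatch

    def _my_stack_dispatcher(arrays, axis=None, out=None):
        # Same parameters as the implementation, all defaults written as
        # None; return only the array-like arguments relevant to dispatch.
        return arrays

    @array_function_dispatch(_my_stack_dispatcher)
    def my_stack(arrays, axis=0, out=None):
        # Plain implementation; the decorator layers __array_function__
        # dispatch on top of it.
        return np.stack(arrays, axis=axis, out=out)

Mismatched parameter names, or a non-None default in the dispatcher, now fail loudly at decoration time instead of silently dispatching on the wrong arguments.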
diff --git a/numpy/core/shape_base.py b/numpy/core/shape_base.py
index 52717abda..feb1605bc 100644
--- a/numpy/core/shape_base.py
+++ b/numpy/core/shape_base.py
@@ -617,4 +617,11 @@ def block(arrays):
_block_format_index(bottom_index)
)
)
- return _block(arrays, list_ndim, max(arr_ndim, list_ndim))
+ result = _block(arrays, list_ndim, max(arr_ndim, list_ndim))
+ if list_ndim == 0:
+ # Catch an edge case where _block returns a view because
+ # `arrays` is a single numpy array and not a list of numpy arrays.
+ # This might copy scalars or lists twice, but this isn't a likely
+ # usecase for those interested in performance
+ result = result.copy()
+ return result
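
In practice np.block now always hands back memory the caller owns, even in the single-array edge case where _block used to return a view; the new test_returns_copy tests below pin this down for both np.block and np.concatenate. For example:

    import numpy as np

    a = np.eye(3)
    b = np.block(a)      # single array, list_ndim == 0: result is now a copy
    b[0, 0] = 2
    assert a[0, 0] == 1  # `a` is no longer aliased by the result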
diff --git a/numpy/core/tests/test_datetime.py b/numpy/core/tests/test_datetime.py
index c4918f955..6303c043a 100644
--- a/numpy/core/tests/test_datetime.py
+++ b/numpy/core/tests/test_datetime.py
@@ -623,14 +623,17 @@ class TestDateTime(object):
def test_pickle(self):
# Check that pickle roundtripping works
- dt = np.dtype('M8[7D]')
- assert_equal(pickle.loads(pickle.dumps(dt)), dt)
- dt = np.dtype('M8[W]')
- assert_equal(pickle.loads(pickle.dumps(dt)), dt)
- scalar = np.datetime64('2016-01-01T00:00:00.000000000')
- assert_equal(pickle.loads(pickle.dumps(scalar)), scalar)
- delta = scalar - np.datetime64('2015-01-01T00:00:00.000000000')
- assert_equal(pickle.loads(pickle.dumps(delta)), delta)
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ dt = np.dtype('M8[7D]')
+ assert_equal(pickle.loads(pickle.dumps(dt, protocol=proto)), dt)
+ dt = np.dtype('M8[W]')
+ assert_equal(pickle.loads(pickle.dumps(dt, protocol=proto)), dt)
+ scalar = np.datetime64('2016-01-01T00:00:00.000000000')
+ assert_equal(pickle.loads(pickle.dumps(scalar, protocol=proto)),
+ scalar)
+ delta = scalar - np.datetime64('2015-01-01T00:00:00.000000000')
+ assert_equal(pickle.loads(pickle.dumps(delta, protocol=proto)),
+ delta)
# Check that loading pickles from 1.6 works
pkl = b"cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
diff --git a/numpy/core/tests/test_multiarray.py b/numpy/core/tests/test_multiarray.py
index 55b06e68e..e8353a702 100644
--- a/numpy/core/tests/test_multiarray.py
+++ b/numpy/core/tests/test_multiarray.py
@@ -1372,12 +1372,13 @@ class TestZeroSizeFlexible(object):
def test_pickle(self):
import pickle
- for dt in [bytes, np.void, unicode]:
- zs = self._zeros(10, dt)
- p = pickle.dumps(zs)
- zs2 = pickle.loads(p)
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ for dt in [bytes, np.void, unicode]:
+ zs = self._zeros(10, dt)
+ p = pickle.dumps(zs, protocol=proto)
+ zs2 = pickle.loads(p)
- assert_equal(zs.dtype, zs2.dtype)
+ assert_equal(zs.dtype, zs2.dtype)
class TestMethods(object):
@@ -3550,16 +3551,19 @@ class TestSubscripting(object):
class TestPickling(object):
def test_roundtrip(self):
import pickle
- carray = np.array([[2, 9], [7, 0], [3, 8]])
- DATA = [
- carray,
- np.transpose(carray),
- np.array([('xxx', 1, 2.0)], dtype=[('a', (str, 3)), ('b', int),
- ('c', float)])
- ]
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ carray = np.array([[2, 9], [7, 0], [3, 8]])
+ DATA = [
+ carray,
+ np.transpose(carray),
+ np.array([('xxx', 1, 2.0)], dtype=[('a', (str, 3)), ('b', int),
+ ('c', float)])
+ ]
- for a in DATA:
- assert_equal(a, pickle.loads(a.dumps()), err_msg="%r" % a)
+ for a in DATA:
+ assert_equal(
+ a, pickle.loads(pickle.dumps(a, protocol=proto)),
+ err_msg="%r" % a)
def _loads(self, obj):
import pickle
diff --git a/numpy/core/tests/test_numerictypes.py b/numpy/core/tests/test_numerictypes.py
index 70871774f..27e4fdeec 100644
--- a/numpy/core/tests/test_numerictypes.py
+++ b/numpy/core/tests/test_numerictypes.py
@@ -475,6 +475,21 @@ class Test_sctype2char(object):
def test_non_type(self):
assert_raises(ValueError, np.sctype2char, 1)
+@pytest.mark.parametrize("rep, expected", [
+ (np.int32, True),
+ (list, False),
+ (1.1, False),
+ (str, True),
+ (np.dtype(np.float64), True),
+ (np.dtype((np.int16, (3, 4))), True),
+ (np.dtype([('a', np.int8)]), True),
+ ])
+def test_issctype(rep, expected):
+ # ensure proper identification of scalar
+ # data-types by issctype()
+ actual = np.issctype(rep)
+ assert_equal(actual, expected)
+
@pytest.mark.skipif(sys.flags.optimize > 1,
reason="no docstrings present to inspect when PYTHONOPTIMIZE/Py_OptimizeFlag > 1")
diff --git a/numpy/core/tests/test_overrides.py b/numpy/core/tests/test_overrides.py
index 7f6157a5b..895f221da 100644
--- a/numpy/core/tests/test_overrides.py
+++ b/numpy/core/tests/test_overrides.py
@@ -7,7 +7,8 @@ import numpy as np
from numpy.testing import (
assert_, assert_equal, assert_raises, assert_raises_regex)
from numpy.core.overrides import (
- get_overloaded_types_and_args, array_function_dispatch)
+ get_overloaded_types_and_args, array_function_dispatch,
+ verify_matching_signatures)
def _get_overloaded_args(relevant_args):
@@ -200,6 +201,36 @@ class TestArrayFunctionDispatch(object):
dispatched_one_arg(array)
+class TestVerifyMatchingSignatures(object):
+
+ def test_verify_matching_signatures(self):
+
+ verify_matching_signatures(lambda x: 0, lambda x: 0)
+ verify_matching_signatures(lambda x=None: 0, lambda x=None: 0)
+ verify_matching_signatures(lambda x=1: 0, lambda x=None: 0)
+
+ with assert_raises(RuntimeError):
+ verify_matching_signatures(lambda a: 0, lambda b: 0)
+ with assert_raises(RuntimeError):
+ verify_matching_signatures(lambda x: 0, lambda x=None: 0)
+ with assert_raises(RuntimeError):
+ verify_matching_signatures(lambda x=None: 0, lambda y=None: 0)
+ with assert_raises(RuntimeError):
+ verify_matching_signatures(lambda x=1: 0, lambda y=1: 0)
+
+ def test_array_function_dispatch(self):
+
+ with assert_raises(RuntimeError):
+ @array_function_dispatch(lambda x: (x,))
+ def f(y):
+ pass
+
+ # should not raise
+ @array_function_dispatch(lambda x: (x,), verify=False)
+ def f(y):
+ pass
+
+
def _new_duck_type_and_implements():
"""Create a duck array type and implements functions."""
HANDLED_FUNCTIONS = {}
diff --git a/numpy/core/tests/test_records.py b/numpy/core/tests/test_records.py
index d7c7d16e3..056d39db8 100644
--- a/numpy/core/tests/test_records.py
+++ b/numpy/core/tests/test_records.py
@@ -378,22 +378,27 @@ class TestRecord(object):
def test_pickle_1(self):
# Issue #1529
a = np.array([(1, [])], dtype=[('a', np.int32), ('b', np.int32, 0)])
- assert_equal(a, pickle.loads(pickle.dumps(a)))
- assert_equal(a[0], pickle.loads(pickle.dumps(a[0])))
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ assert_equal(a, pickle.loads(pickle.dumps(a, protocol=proto)))
+ assert_equal(a[0], pickle.loads(pickle.dumps(a[0],
+ protocol=proto)))
def test_pickle_2(self):
a = self.data
- assert_equal(a, pickle.loads(pickle.dumps(a)))
- assert_equal(a[0], pickle.loads(pickle.dumps(a[0])))
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ assert_equal(a, pickle.loads(pickle.dumps(a, protocol=proto)))
+ assert_equal(a[0], pickle.loads(pickle.dumps(a[0],
+ protocol=proto)))
def test_pickle_3(self):
# Issue #7140
a = self.data
- pa = pickle.loads(pickle.dumps(a[0]))
- assert_(pa.flags.c_contiguous)
- assert_(pa.flags.f_contiguous)
- assert_(pa.flags.writeable)
- assert_(pa.flags.aligned)
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ pa = pickle.loads(pickle.dumps(a[0], protocol=proto))
+ assert_(pa.flags.c_contiguous)
+ assert_(pa.flags.f_contiguous)
+ assert_(pa.flags.writeable)
+ assert_(pa.flags.aligned)
def test_objview_record(self):
# https://github.com/numpy/numpy/issues/2599
diff --git a/numpy/core/tests/test_regression.py b/numpy/core/tests/test_regression.py
index b74216418..947ee5f86 100644
--- a/numpy/core/tests/test_regression.py
+++ b/numpy/core/tests/test_regression.py
@@ -818,8 +818,9 @@ class TestRegression(object):
# Ticket #600
x = np.array(["DROND", "DROND1"], dtype="U6")
el = x[1]
- new = pickle.loads(pickle.dumps(el))
- assert_equal(new, el)
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ new = pickle.loads(pickle.dumps(el, protocol=proto))
+ assert_equal(new, el)
def test_arange_non_native_dtype(self):
# Ticket #616
@@ -1066,11 +1067,12 @@ class TestRegression(object):
def test_dot_alignment_sse2(self):
# Test for ticket #551, changeset r5140
x = np.zeros((30, 40))
- y = pickle.loads(pickle.dumps(x))
- # y is now typically not aligned on a 8-byte boundary
- z = np.ones((1, y.shape[0]))
- # This shouldn't cause a segmentation fault:
- np.dot(z, y)
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ y = pickle.loads(pickle.dumps(x, protocol=proto))
+ # y is now typically not aligned on a 8-byte boundary
+ z = np.ones((1, y.shape[0]))
+ # This shouldn't cause a segmentation fault:
+ np.dot(z, y)
def test_astype_copy(self):
# Ticket #788, changeset r5155
@@ -1280,9 +1282,12 @@ class TestRegression(object):
assert_(test_record_void_scalar == test_record)
- #Test pickle and unpickle of void and record scalars
- assert_(pickle.loads(pickle.dumps(test_string)) == test_string)
- assert_(pickle.loads(pickle.dumps(test_record)) == test_record)
+ # Test pickle and unpickle of void and record scalars
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ assert_(pickle.loads(
+ pickle.dumps(test_string, protocol=proto)) == test_string)
+ assert_(pickle.loads(
+ pickle.dumps(test_record, protocol=proto)) == test_record)
def test_blasdot_uninitialized_memory(self):
# Ticket #950
@@ -1925,11 +1930,12 @@ class TestRegression(object):
def test_pickle_bytes_overwrite(self):
if sys.version_info[0] >= 3:
- data = np.array([1], dtype='b')
- data = pickle.loads(pickle.dumps(data))
- data[0] = 0xdd
- bytestring = "\x01 ".encode('ascii')
- assert_equal(bytestring[0:1], '\x01'.encode('ascii'))
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ data = np.array([1], dtype='b')
+ data = pickle.loads(pickle.dumps(data, protocol=proto))
+ data[0] = 0xdd
+ bytestring = "\x01 ".encode('ascii')
+ assert_equal(bytestring[0:1], '\x01'.encode('ascii'))
def test_pickle_py2_array_latin1_hack(self):
# Check that unpickling hacks in Py3 that support
@@ -2231,10 +2237,10 @@ class TestRegression(object):
def test_pickle_empty_string(self):
# gh-3926
-
- import pickle
- test_string = np.string_('')
- assert_equal(pickle.loads(pickle.dumps(test_string)), test_string)
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ test_string = np.string_('')
+ assert_equal(pickle.loads(
+ pickle.dumps(test_string, protocol=proto)), test_string)
def test_frompyfunc_many_args(self):
# gh-5672
diff --git a/numpy/core/tests/test_shape_base.py b/numpy/core/tests/test_shape_base.py
index 72b3451a4..df819b73f 100644
--- a/numpy/core/tests/test_shape_base.py
+++ b/numpy/core/tests/test_shape_base.py
@@ -191,6 +191,12 @@ class TestVstack(object):
class TestConcatenate(object):
+ def test_returns_copy(self):
+ a = np.eye(3)
+ b = np.concatenate([a])
+ b[0, 0] = 2
+ assert b[0, 0] != a[0, 0]
+
def test_exceptions(self):
# test axis must be in bounds
for ndim in [1, 2, 3]:
@@ -367,6 +373,12 @@ def test_stack():
class TestBlock(object):
+ def test_returns_copy(self):
+ a = np.eye(3)
+ b = np.block(a)
+ b[0, 0] = 2
+ assert b[0, 0] != a[0, 0]
+
def test_block_simple_row_wise(self):
a_2d = np.ones((2, 2))
b_2d = 2 * a_2d
diff --git a/numpy/distutils/misc_util.py b/numpy/distutils/misc_util.py
index b30fc27f7..073e841e8 100644
--- a/numpy/distutils/misc_util.py
+++ b/numpy/distutils/misc_util.py
@@ -84,7 +84,9 @@ def get_num_build_jobs():
Get number of parallel build jobs set by the --parallel command line
argument of setup.py
If the command did not receive a setting the environment variable
- NPY_NUM_BUILD_JOBS checked and if that is unset it returns 1.
+ NPY_NUM_BUILD_JOBS is checked. If that is unset, return the number of
+ processors on the system, with a maximum of 8 (to prevent
+    overloading the system if there are a lot of CPUs).
Returns
-------
@@ -97,6 +99,7 @@ def get_num_build_jobs():
cpu_count = len(os.sched_getaffinity(0))
except AttributeError:
cpu_count = multiprocessing.cpu_count()
+ cpu_count = min(cpu_count, 8)
envjobs = int(os.environ.get("NPY_NUM_BUILD_JOBS", cpu_count))
dist = get_distribution()
# may be None during configuration
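
The resulting resolution order is: an explicit --parallel on the setup.py command line, then NPY_NUM_BUILD_JOBS, then the detected CPU count capped at 8. A standalone sketch of the fallback logic in the hunk above:

    import multiprocessing
    import os

    try:
        # Honour CPU affinity masks (e.g. under taskset); not on all platforms.
        cpu_count = len(os.sched_getaffinity(0))
    except AttributeError:
        cpu_count = multiprocessing.cpu_count()
    cpu_count = min(cpu_count, 8)  # cap so many-core machines are not overloaded
    envjobs = int(os.environ.get("NPY_NUM_BUILD_JOBS", cpu_count))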
diff --git a/numpy/matrixlib/tests/test_masked_matrix.py b/numpy/matrixlib/tests/test_masked_matrix.py
index 5ed8044aa..7f84bb2c9 100644
--- a/numpy/matrixlib/tests/test_masked_matrix.py
+++ b/numpy/matrixlib/tests/test_masked_matrix.py
@@ -79,10 +79,11 @@ class TestMaskedMatrix(object):
def test_pickling_subbaseclass(self):
# Test pickling w/ a subclass of ndarray
a = masked_array(np.matrix(list(range(10))), mask=[1, 0, 1, 0, 0] * 2)
- a_pickled = pickle.loads(a.dumps())
- assert_equal(a_pickled._mask, a._mask)
- assert_equal(a_pickled, a)
- assert_(isinstance(a_pickled._data, np.matrix))
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ a_pickled = pickle.loads(pickle.dumps(a, protocol=proto))
+ assert_equal(a_pickled._mask, a._mask)
+ assert_equal(a_pickled, a)
+ assert_(isinstance(a_pickled._data, np.matrix))
def test_count_mean_with_matrix(self):
m = masked_array(np.matrix([[1, 2], [3, 4]]), mask=np.zeros((2, 2)))
diff --git a/tools/travis-before-install.sh b/tools/travis-before-install.sh
index 1b8c370d3..c334e91ae 100755
--- a/tools/travis-before-install.sh
+++ b/tools/travis-before-install.sh
@@ -20,6 +20,11 @@ fi
source venv/bin/activate
python -V
+
+if [ -n "$INSTALL_PICKLE5" ]; then
+ pip install pickle5
+fi
+
pip install --upgrade pip setuptools
pip install nose pytz cython pytest
if [ -n "$USE_ASV" ]; then pip install asv; fi
diff --git a/tools/travis-test.sh b/tools/travis-test.sh
index 84a57b1de..2a16b37a3 100755
--- a/tools/travis-test.sh
+++ b/tools/travis-test.sh
@@ -193,6 +193,11 @@ if [ -n "$USE_WHEEL" ] && [ $# -eq 0 ]; then
pushd dist
pip install --pre --no-index --upgrade --find-links=. numpy
pip install nose pytest
+
+ if [ -n "$INSTALL_PICKLE5" ]; then
+ pip install pickle5
+ fi
+
popd
run_test
elif [ -n "$USE_SDIST" ] && [ $# -eq 0 ]; then
@@ -212,6 +217,10 @@ elif [ -n "$USE_SDIST" ] && [ $# -eq 0 ]; then
pushd dist
pip install numpy*
pip install nose pytest
+ if [ -n "$INSTALL_PICKLE5" ]; then
+ pip install pickle5
+ fi
+
popd
run_test
elif [ -n "$USE_CHROOT" ] && [ $# -eq 0 ]; then