author     Mark Wiebe <mwwiebe@gmail.com>    2011-01-18 12:13:32 -0800
committer  Mark Wiebe <mwwiebe@gmail.com>    2011-01-18 12:13:32 -0800
commit     ee06d183c407ea315b2eb3ef41ee422b0ea30251 (patch)
tree       63f342953c62be3ff4d991a6c2559c11d8233c53
parent     cce7e1fcfa49b2fe8e1b9c1530269fdcebade14b (diff)
download   numpy-ee06d183c407ea315b2eb3ef41ee422b0ea30251.tar.gz
ENH: core: Start converting ufunc to new iterator, add PyArray_PromoteTypes
-rw-r--r--  doc/source/reference/routines.dtype.rst         1
-rw-r--r--  numpy/add_newdocs.py                           40
-rw-r--r--  numpy/core/code_generators/numpy_api.py         1
-rw-r--r--  numpy/core/numeric.py                           3
-rw-r--r--  numpy/core/src/multiarray/convert_datatype.c  122
-rw-r--r--  numpy/core/src/multiarray/dtype_transfer.c     16
-rw-r--r--  numpy/core/src/multiarray/iterators.c           2
-rw-r--r--  numpy/core/src/multiarray/multiarraymodule.c   30
-rw-r--r--  numpy/core/src/multiarray/scalartypes.c.src   131
-rw-r--r--  numpy/core/src/multiarray/scalartypes.h        12
-rw-r--r--  numpy/core/src/umath/ufunc_object.c           184
-rwxr-xr-x  numpy/testing/print_coercion_tables.py         19
12 files changed, 534 insertions, 27 deletions
diff --git a/doc/source/reference/routines.dtype.rst b/doc/source/reference/routines.dtype.rst
index a311f3da5..d02468433 100644
--- a/doc/source/reference/routines.dtype.rst
+++ b/doc/source/reference/routines.dtype.rst
@@ -9,6 +9,7 @@ Data type routines
:toctree: generated/
can_cast
+ promote_types
common_type
obj2sctype
diff --git a/numpy/add_newdocs.py b/numpy/add_newdocs.py
index 5e6a07bb0..92e438249 100644
--- a/numpy/add_newdocs.py
+++ b/numpy/add_newdocs.py
@@ -1178,6 +1178,46 @@ add_newdoc('numpy.core.multiarray', 'can_cast',
""")
+add_newdoc('numpy.core.multiarray', 'promote_types',
+ """
+ promote_types(type1, type2)
+
+ Returns the data type with the smallest size and smallest scalar
+ kind to which both ``type1`` and ``type2`` may be safely cast.
+ The returned data type is always in native byte order. Promotion of
+ string, unicode and void with numbers is disallowed.
+
+ Parameters
+ ----------
+ type1 : dtype or dtype specifier
+ First data type.
+ type2 : dtype or dtype specifier
+ Second data type.
+
+ Returns
+ -------
+ out : dtype
+ The promoted data type.
+
+ Examples
+ --------
+ >>> np.promote_types('f4', 'f8')
+ dtype('float64')
+
+ >>> np.promote_types('i8', 'f4')
+ dtype('float64')
+
+ >>> np.promote_types('>i8', '<c8')
+ dtype('complex128')
+
+ >>> np.promote_types('i1', 'S8')
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ TypeError: invalid type promotion
+
+
+ """)
+
add_newdoc('numpy.core.multiarray','newbuffer',
"""newbuffer(size)
diff --git a/numpy/core/code_generators/numpy_api.py b/numpy/core/code_generators/numpy_api.py
index 74ce72ab8..c3d639f90 100644
--- a/numpy/core/code_generators/numpy_api.py
+++ b/numpy/core/code_generators/numpy_api.py
@@ -294,6 +294,7 @@ multiarray_funcs_api = {
'NpyIter_IterationNeedsAPI': 260,
'PyArray_CastingConverter': 261,
'PyArray_CountNonzero': 262,
+ 'PyArray_PromoteTypes': 263,
}
ufunc_types_api = {
diff --git a/numpy/core/numeric.py b/numpy/core/numeric.py
index a65c62a08..4648f4bb0 100644
--- a/numpy/core/numeric.py
+++ b/numpy/core/numeric.py
@@ -3,7 +3,7 @@ __all__ = ['newaxis', 'ndarray', 'flatiter', 'newiter', 'nested_iters', 'ufunc',
'dtype', 'fromstring', 'fromfile', 'frombuffer',
'int_asbuffer', 'where', 'argwhere',
'concatenate', 'fastCopyAndTranspose', 'lexsort',
- 'set_numeric_ops', 'can_cast',
+ 'set_numeric_ops', 'can_cast', 'promote_types',
'asarray', 'asanyarray', 'ascontiguousarray', 'asfortranarray',
'isfortran', 'empty_like', 'zeros_like',
'correlate', 'convolve', 'inner', 'dot', 'outer', 'vdot',
@@ -212,6 +212,7 @@ concatenate = multiarray.concatenate
fastCopyAndTranspose = multiarray._fastCopyAndTranspose
set_numeric_ops = multiarray.set_numeric_ops
can_cast = multiarray.can_cast
+promote_types = multiarray.promote_types
lexsort = multiarray.lexsort
compare_chararrays = multiarray.compare_chararrays
putmask = multiarray.putmask
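
With the re-export above, the new function is reachable as np.promote_types. A
few doctest-style examples of the rule the docstring states (smallest size and
smallest scalar kind to which both inputs cast safely), assuming a build that
contains this patch; note that mixed-kind inputs can promote to a type larger
than either operand:

    >>> import numpy as np
    >>> np.promote_types('u1', 'i1')    # no 8-bit type holds both, so int16
    dtype('int16')
    >>> np.promote_types('u8', 'i8')    # no integer holds both, falls through to float
    dtype('float64')
    >>> np.promote_types('>i4', '<i4')  # result is always native byte order
    dtype('int32')
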
diff --git a/numpy/core/src/multiarray/convert_datatype.c b/numpy/core/src/multiarray/convert_datatype.c
index d81502831..abebda8a7 100644
--- a/numpy/core/src/multiarray/convert_datatype.c
+++ b/numpy/core/src/multiarray/convert_datatype.c
@@ -276,6 +276,128 @@ PyArray_CanCastScalar(PyTypeObject *from, PyTypeObject *to)
}
/*NUMPY_API
+ * Produces the smallest size and lowest kind type to which both
+ * input types can be cast.
+ */
+NPY_NO_EXPORT PyArray_Descr *
+PyArray_PromoteTypes(PyArray_Descr *type1, PyArray_Descr *type2)
+{
+ int type_num1 = type1->type_num, type_num2 = type2->type_num, ret_type_num;
+
+ /* If they're built-in types, use the promotion table */
+ if (type_num1 < NPY_NTYPES && type_num2 < NPY_NTYPES) {
+ ret_type_num = _npy_type_promotion_table[type_num1][type_num2];
+ /* The table doesn't handle string/unicode/void, check the result */
+ if (ret_type_num >= 0) {
+ return PyArray_DescrFromType(ret_type_num);
+ }
+ }
+ /* If one or both are user defined, calculate it */
+ else {
+ int skind1 = NPY_NOSCALAR, skind2 = NPY_NOSCALAR, skind;
+
+ if (PyArray_CanCastTo(type2, type1)) {
+ /* Promoted types are always native byte order */
+ if (PyArray_ISNBO(type1->byteorder)) {
+ Py_INCREF(type1);
+ return type1;
+ }
+ else {
+ return PyArray_DescrNewByteorder(type1, NPY_NATIVE);
+ }
+ }
+ else if (PyArray_CanCastTo(type1, type2)) {
+ /* Promoted types are always native byte order */
+ if (PyArray_ISNBO(type2->byteorder)) {
+ Py_INCREF(type2);
+ return type2;
+ }
+ else {
+ return PyArray_DescrNewByteorder(type2, NPY_NATIVE);
+ }
+ }
+
+ /* Convert the 'kind' char into a scalar kind */
+ switch (type1->kind) {
+ case 'b':
+ skind1 = NPY_BOOL_SCALAR;
+ break;
+ case 'u':
+ skind1 = NPY_INTPOS_SCALAR;
+ break;
+ case 'i':
+ skind1 = NPY_INTNEG_SCALAR;
+ break;
+ case 'f':
+ skind1 = NPY_FLOAT_SCALAR;
+ break;
+ case 'c':
+ skind1 = NPY_COMPLEX_SCALAR;
+ break;
+ }
+ switch (type2->kind) {
+ case 'b':
+ skind2 = NPY_BOOL_SCALAR;
+ break;
+ case 'u':
+ skind2 = NPY_INTPOS_SCALAR;
+ break;
+ case 'i':
+ skind2 = NPY_INTNEG_SCALAR;
+ break;
+ case 'f':
+ skind2 = NPY_FLOAT_SCALAR;
+ break;
+ case 'c':
+ skind2 = NPY_COMPLEX_SCALAR;
+ break;
+ }
+
+ /* If both are scalars, there may be a promotion possible */
+ if (skind1 != NPY_NOSCALAR && skind2 != NPY_NOSCALAR) {
+
+ /* Start with the larger scalar kind */
+ skind = (skind1 > skind2) ? skind1 : skind2;
+ ret_type_num = _npy_smallest_type_of_kind_table[skind];
+
+ for (;;) {
+
+ /* If there is no larger type of this kind, try a larger kind */
+ if (ret_type_num < 0) {
+ ++skind;
+ /* Use -1 to signal no promoted type found */
+ if (skind < NPY_NSCALARKINDS) {
+ ret_type_num = _npy_smallest_type_of_kind_table[skind];
+ }
+ else {
+ break;
+ }
+ }
+
+ /* If we found a type to which we can promote both, done! */
+ if (PyArray_CanCastSafely(type_num1, ret_type_num) &&
+ PyArray_CanCastSafely(type_num2, ret_type_num)) {
+ return PyArray_DescrFromType(ret_type_num);
+ }
+
+ /* Try the next larger type of this kind */
+ ret_type_num = _npy_next_larger_type_table[ret_type_num];
+ }
+
+ }
+
+ PyErr_SetString(PyExc_TypeError,
+ "invalid type promotion with custom data type");
+ return NULL;
+ }
+
+ /* TODO: Also combine fields, subarrays, strings, etc */
+
+ PyErr_SetString(PyExc_TypeError, "invalid type promotion");
+ return NULL;
+}
+
+/*NUMPY_API
* Is the typenum valid?
*/
NPY_NO_EXPORT int
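
A note on the user-defined branch above: when neither input casts safely to the
other, the search starts from the smallest type of the larger scalar kind and
walks _npy_next_larger_type_table, stepping up to the next kind when the current
one runs out; this is the same walk initialize_casting_tables uses to fill the
built-in promotion table. A rough Python model of that search (the kind order
and candidate lists are illustrative, and promote_model is a hypothetical
helper, not part of this patch):

    import numpy as np

    # Size-ordered candidates per scalar kind, smallest first; the real code
    # uses _npy_smallest_type_of_kind_table and _npy_next_larger_type_table.
    CANDIDATES = {
        'bool':    ['?'],
        'uint':    ['u1', 'u2', 'u4', 'u8'],
        'int':     ['i1', 'i2', 'i4', 'i8'],
        'float':   ['f2', 'f4', 'f8', 'g'],
        'complex': ['c8', 'c16', 'G'],
    }
    KIND_ORDER = ['bool', 'uint', 'int', 'float', 'complex']
    KIND_OF = {'b': 'bool', 'u': 'uint', 'i': 'int', 'f': 'float', 'c': 'complex'}

    def promote_model(t1, t2):
        d1, d2 = np.dtype(t1), np.dtype(t2)
        # Start with the larger of the two scalar kinds, as the C loop does.
        start = max(KIND_ORDER.index(KIND_OF[d1.kind]),
                    KIND_ORDER.index(KIND_OF[d2.kind]))
        for kind in KIND_ORDER[start:]:
            for cand in CANDIDATES[kind]:
                if np.can_cast(d1, cand) and np.can_cast(d2, cand):
                    return np.dtype(cand)
        raise TypeError("invalid type promotion")

    assert promote_model('i8', 'f4') == np.dtype('f8')  # same as promote_types
    assert promote_model('u1', 'i1') == np.dtype('i2')
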
diff --git a/numpy/core/src/multiarray/dtype_transfer.c b/numpy/core/src/multiarray/dtype_transfer.c
index fc03c5a38..91680dbc9 100644
--- a/numpy/core/src/multiarray/dtype_transfer.c
+++ b/numpy/core/src/multiarray/dtype_transfer.c
@@ -2423,8 +2423,6 @@ get_setdstzero_transfer_function(int aligned,
*out_stransfer = &_null_to_strided_memset_zero;
}
*out_transferdata = data;
-
- return NPY_SUCCEED;
}
/* If it's exactly one reference, use the decref function */
else if (dst_dtype->type_num == NPY_OBJECT) {
@@ -2434,8 +2432,6 @@ get_setdstzero_transfer_function(int aligned,
*out_stransfer = &_null_to_strided_reference_setzero;
*out_transferdata = NULL;
-
- return NPY_SUCCEED;
}
/* If there are subarrays, need to wrap it */
else if (dst_dtype->subarray != NULL) {
@@ -2467,15 +2463,13 @@ get_setdstzero_transfer_function(int aligned,
}
if (wrap_transfer_function_n_to_n(stransfer, data,
- 0, dst_stride,
- 0, dst_dtype->subarray->base->elsize,
- dst_size,
- out_stransfer, out_transferdata) != NPY_SUCCEED) {
+ 0, dst_stride,
+ 0, dst_dtype->subarray->base->elsize,
+ dst_size,
+ out_stransfer, out_transferdata) != NPY_SUCCEED) {
PyArray_FreeStridedTransferData(data);
return NPY_FAIL;
}
-
- return NPY_SUCCEED;
}
/* If there are fields, need to do each field */
else if (PyDataType_HASFIELDS(dst_dtype)) {
@@ -2489,6 +2483,8 @@ get_setdstzero_transfer_function(int aligned,
out_transferdata,
out_needs_api);
}
+
+ return NPY_SUCCEED;
}
static void
diff --git a/numpy/core/src/multiarray/iterators.c b/numpy/core/src/multiarray/iterators.c
index bb996e3bc..6c1d97e8e 100644
--- a/numpy/core/src/multiarray/iterators.c
+++ b/numpy/core/src/multiarray/iterators.c
@@ -1155,7 +1155,7 @@ iter_array(PyArrayIterObject *it, PyObject *NPY_UNUSED(op))
if (r == NULL) {
return NULL;
}
- if (PyArray_CopyAnyInto(r, it->ao) < 0) {
+ if (PyArray_CopyAnyInto((PyArrayObject *)r, it->ao) < 0) {
Py_DECREF(r);
return NULL;
}
diff --git a/numpy/core/src/multiarray/multiarraymodule.c b/numpy/core/src/multiarray/multiarraymodule.c
index a70bd4657..d558cbd04 100644
--- a/numpy/core/src/multiarray/multiarraymodule.c
+++ b/numpy/core/src/multiarray/multiarraymodule.c
@@ -2194,7 +2194,7 @@ array_can_cast_safely(PyObject *NPY_UNUSED(self), PyObject *args,
static char *kwlist[] = {"from", "to", NULL};
if(!PyArg_ParseTupleAndKeywords(args, kwds, "O&O&", kwlist,
- PyArray_DescrConverter, &d1, PyArray_DescrConverter, &d2)) {
+ PyArray_DescrConverter2, &d1, PyArray_DescrConverter2, &d2)) {
goto finish;
}
if (d1 == NULL || d2 == NULL) {
@@ -2213,6 +2213,31 @@ array_can_cast_safely(PyObject *NPY_UNUSED(self), PyObject *args,
return retobj;
}
+static PyObject *
+array_promote_types(PyObject *NPY_UNUSED(dummy), PyObject *args)
+{
+ PyArray_Descr *d1 = NULL;
+ PyArray_Descr *d2 = NULL;
+ PyObject *ret = NULL;
+ if(!PyArg_ParseTuple(args, "O&O&",
+ PyArray_DescrConverter2, &d1, PyArray_DescrConverter2, &d2)) {
+ goto finish;
+ }
+
+ if (d1 == NULL || d2 == NULL) {
+ PyErr_SetString(PyExc_TypeError,
+ "did not understand one of the types");
+ goto finish;
+ }
+
+ ret = (PyObject *)PyArray_PromoteTypes(d1, d2);
+
+ finish:
+ Py_XDECREF(d1);
+ Py_XDECREF(d2);
+ return ret;
+}
+
#if !defined(NPY_PY3K)
static PyObject *
new_buffer(PyObject *NPY_UNUSED(dummy), PyObject *args)
@@ -2818,6 +2843,9 @@ static struct PyMethodDef array_module_methods[] = {
{"can_cast",
(PyCFunction)array_can_cast_safely,
METH_VARARGS | METH_KEYWORDS, NULL},
+ {"promote_types",
+ (PyCFunction)array_promote_types,
+ METH_VARARGS, NULL},
#if !defined(NPY_PY3K)
{"newbuffer",
(PyCFunction)new_buffer,
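
Since both can_cast and the new promote_types now go through
PyArray_DescrConverter2, they accept anything np.dtype() understands (type
strings, dtype objects, Python scalar types); if I read the converter change
correctly, passing None is now rejected with "did not understand one of the
types" rather than being silently treated as the default float64 descriptor.
Two doctest-style examples against a build with this patch:

    >>> np.promote_types(np.dtype('i2'), float)   # Python float means float64
    dtype('float64')
    >>> np.promote_types('i4', np.float32)
    dtype('float64')
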
diff --git a/numpy/core/src/multiarray/scalartypes.c.src b/numpy/core/src/multiarray/scalartypes.c.src
index c66337602..86f373dba 100644
--- a/numpy/core/src/multiarray/scalartypes.c.src
+++ b/numpy/core/src/multiarray/scalartypes.c.src
@@ -3277,32 +3277,69 @@ NPY_NO_EXPORT PyTypeObject Py@NAME@ArrType_Type = {
*/
NPY_NO_EXPORT char
_npy_scalar_kinds_table[NPY_NTYPES];
+
+/*
+ * This table maps a scalar kind (excluding NPY_NOSCALAR)
+ * to the smallest type number of that kind.
+ */
+NPY_NO_EXPORT char
+_npy_smallest_type_of_kind_table[NPY_NSCALARKINDS];
+
+/*
+ * This table gives the type of the same kind, but next in the sequence
+ * of sizes.
+ */
+NPY_NO_EXPORT char
+_npy_next_larger_type_table[NPY_NTYPES];
+
/*
* This table describes safe casting for small type numbers,
* and is used by PyArray_CanCastSafely.
*/
NPY_NO_EXPORT unsigned char
_npy_can_cast_safely_table[NPY_NTYPES][NPY_NTYPES];
+
+/*
+ * This table gives the smallest-size and smallest-kind type to which
+ * the input types may be safely cast, according to _npy_can_cast_safely.
+ */
+NPY_NO_EXPORT char
+_npy_type_promotion_table[NPY_NTYPES][NPY_NTYPES];
#endif
NPY_NO_EXPORT void
initialize_casting_tables(void)
{
- int i;
+ int i, j;
+
+ _npy_smallest_type_of_kind_table[NPY_BOOL_SCALAR] = NPY_BOOL;
+ _npy_smallest_type_of_kind_table[NPY_INTPOS_SCALAR] = NPY_UBYTE;
+ _npy_smallest_type_of_kind_table[NPY_INTNEG_SCALAR] = NPY_BYTE;
+ _npy_smallest_type_of_kind_table[NPY_FLOAT_SCALAR] = NPY_HALF;
+ _npy_smallest_type_of_kind_table[NPY_COMPLEX_SCALAR] = NPY_CFLOAT;
+ _npy_smallest_type_of_kind_table[NPY_OBJECT_SCALAR] = NPY_OBJECT;
/* Default for built-in types is object scalar */
memset(_npy_scalar_kinds_table, PyArray_OBJECT_SCALAR,
sizeof(_npy_scalar_kinds_table));
+ /* Default for next largest type is -1, signalling no bigger */
+ memset(_npy_next_larger_type_table, -1,
+ sizeof(_npy_next_larger_type_table));
/* Compile-time loop of scalar kinds */
/**begin repeat
* #NAME = BOOL, BYTE, UBYTE, SHORT, USHORT, INT, UINT, LONG, ULONG,
* LONGLONG, ULONGLONG, HALF, FLOAT, DOUBLE, LONGDOUBLE,
* CFLOAT, CDOUBLE, CLONGDOUBLE#
+ * #BIGGERTYPE = -1, NPY_SHORT, NPY_USHORT, NPY_INT, NPY_UINT,
+ * NPY_LONG, NPY_ULONG, NPY_LONGLONG, NPY_ULONGLONG,
+ * -1, -1, NPY_FLOAT, NPY_DOUBLE, NPY_LONGDOUBLE, -1,
+ * NPY_CDOUBLE, NPY_CLONGDOUBLE, -1#
* #SCKIND = BOOL, (INTNEG, INTPOS)*5, FLOAT*4,
* COMPLEX*3#
*/
- _npy_scalar_kinds_table[PyArray_@NAME@] = PyArray_@SCKIND@_SCALAR;
+ _npy_scalar_kinds_table[NPY_@NAME@] = PyArray_@SCKIND@_SCALAR;
+ _npy_next_larger_type_table[NPY_@NAME@] = @BIGGERTYPE@;
/**end repeat**/
memset(_npy_can_cast_safely_table, 0, sizeof(_npy_can_cast_safely_table));
@@ -3311,17 +3348,17 @@ initialize_casting_tables(void)
/* Identity */
_npy_can_cast_safely_table[i][i] = 1;
/* Bool -> <Anything> */
- _npy_can_cast_safely_table[PyArray_BOOL][i] = 1;
+ _npy_can_cast_safely_table[NPY_BOOL][i] = 1;
/* DateTime sits out for these... */
if (i != PyArray_DATETIME && i != PyArray_TIMEDELTA) {
/* <Anything> -> Object */
- _npy_can_cast_safely_table[i][PyArray_OBJECT] = 1;
+ _npy_can_cast_safely_table[i][NPY_OBJECT] = 1;
/* <Anything> -> Void */
- _npy_can_cast_safely_table[i][PyArray_VOID] = 1;
+ _npy_can_cast_safely_table[i][NPY_VOID] = 1;
}
}
- _npy_can_cast_safely_table[PyArray_STRING][PyArray_UNICODE] = 1;
+ _npy_can_cast_safely_table[NPY_STRING][NPY_UNICODE] = 1;
#ifndef NPY_SIZEOF_BYTE
#define NPY_SIZEOF_BYTE 1
@@ -3349,7 +3386,7 @@ initialize_casting_tables(void)
* 1, 1, 1#
*/
#define _FROM_BSIZE NPY_SIZEOF_@FROM_BASENAME@
-#define _FROM_NUM (PyArray_@FROM_NAME@)
+#define _FROM_NUM (NPY_@FROM_NAME@)
_npy_can_cast_safely_table[_FROM_NUM][PyArray_STRING] = 1;
_npy_can_cast_safely_table[_FROM_NUM][PyArray_UNICODE] = 1;
@@ -3375,7 +3412,7 @@ initialize_casting_tables(void)
* 1, 1, 1#
*/
#define _TO_BSIZE NPY_SIZEOF_@TO_BASENAME@
-#define _TO_NUM (PyArray_@TO_NAME@)
+#define _TO_NUM (NPY_@TO_NAME@)
/*
* NOTE: _FROM_BSIZE and _TO_BSIZE are the sizes of the "base type"
@@ -3454,6 +3491,84 @@ initialize_casting_tables(void)
/**end repeat**/
+ /*
+ * Now that the _can_cast_safely table is finished, we can
+ * use it to build the _type_promotion table
+ */
+ for (i = 0; i < NPY_NTYPES; ++i) {
+ _npy_type_promotion_table[i][i] = i;
+ /* Don't let number promote to string/unicode/void */
+ if (i == NPY_STRING || i == NPY_UNICODE || i == NPY_VOID) {
+ for (j = i+1; j < NPY_NTYPES; ++j) {
+ _npy_type_promotion_table[i][j] = -1;
+ _npy_type_promotion_table[j][i] = -1;
+ }
+ }
+ else {
+ for (j = i+1; j < NPY_NTYPES; ++j) {
+ /* Don't let number promote to string/unicode/void */
+ if (j == NPY_STRING || j == NPY_UNICODE || j == NPY_VOID) {
+ _npy_type_promotion_table[i][j] = -1;
+ _npy_type_promotion_table[j][i] = -1;
+ }
+ else if (_npy_can_cast_safely_table[i][j]) {
+ _npy_type_promotion_table[i][j] = j;
+ _npy_type_promotion_table[j][i] = j;
+ }
+ else if (_npy_can_cast_safely_table[j][i]) {
+ _npy_type_promotion_table[i][j] = i;
+ _npy_type_promotion_table[j][i] = i;
+ }
+ else {
+ int k, iskind, jskind, skind;
+ iskind = _npy_scalar_kinds_table[i];
+ jskind = _npy_scalar_kinds_table[j];
+ /* If there's no kind (void/string/etc) */
+ if (iskind == NPY_NOSCALAR || jskind == NPY_NOSCALAR) {
+ k = -1;
+ }
+ else {
+ /* Start with the type of larger kind */
+ if (iskind > jskind) {
+ skind = iskind;
+ k = i;
+ }
+ else {
+ skind = jskind;
+ k = j;
+ }
+ for (;;) {
+ /* Try the next larger type of this kind */
+ k = _npy_next_larger_type_table[k];
+
+ /* If there is no larger, try a larger kind */
+ if (k < 0) {
+ ++skind;
+ /* Use -1 to signal no promoted type found */
+ if (skind < NPY_NSCALARKINDS) {
+ k = _npy_smallest_type_of_kind_table[skind];
+ }
+ else {
+ k = -1;
+ break;
+ }
+ }
+
+ if (_npy_can_cast_safely_table[i][k] &&
+ _npy_can_cast_safely_table[j][k]) {
+ break;
+ }
+ }
+ }
+ _npy_type_promotion_table[i][j] = k;
+ _npy_type_promotion_table[j][i] = k;
+ }
+ }
+ }
+ }
+ /* Special case date-time */
+ _npy_type_promotion_table[NPY_DATETIME][NPY_TIMEDELTA] = NPY_DATETIME;
+ _npy_type_promotion_table[NPY_TIMEDELTA][NPY_DATETIME] = NPY_DATETIME;
}
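
Because the loop above fills both [i][j] and [j][i] with the same entry,
promotion of the built-in numeric types should be commutative and idempotent.
A quick consistency check one could run against a build with this patch (the
helper below is only a sketch, not part of the test suite):

    import numpy as np

    def check_promotion_symmetry():
        # Built-in bool/integer/float/complex type characters.
        codes = ('?' + np.typecodes['AllInteger']
                     + np.typecodes['Float'] + np.typecodes['Complex'])
        for a in codes:
            for b in codes:
                assert np.promote_types(a, b) == np.promote_types(b, a)
            # Promoting a type with itself gives that type back, native order.
            assert np.promote_types(a, a) == np.dtype(a)

    check_promotion_symmetry()
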
diff --git a/numpy/core/src/multiarray/scalartypes.h b/numpy/core/src/multiarray/scalartypes.h
index c60f61dfb..b7aa094b4 100644
--- a/numpy/core/src/multiarray/scalartypes.h
+++ b/numpy/core/src/multiarray/scalartypes.h
@@ -7,11 +7,23 @@ extern NPY_NO_EXPORT unsigned char
_npy_can_cast_safely_table[NPY_NTYPES][NPY_NTYPES];
extern NPY_NO_EXPORT char
_npy_scalar_kinds_table[NPY_NTYPES];
+extern NPY_NO_EXPORT char
+_npy_type_promotion_table[NPY_NTYPES][NPY_NTYPES];
+extern NPY_NO_EXPORT char
+_npy_smallest_type_of_kind_table[NPY_NSCALARKINDS];
+extern NPY_NO_EXPORT char
+_npy_next_larger_type_table[NPY_NTYPES];
#else
NPY_NO_EXPORT unsigned char
_npy_can_cast_safely_table[NPY_NTYPES][NPY_NTYPES];
NPY_NO_EXPORT char
_npy_scalar_kinds_table[NPY_NTYPES];
+NPY_NO_EXPORT char
+_npy_type_promotion_table[NPY_NTYPES][NPY_NTYPES];
+NPY_NO_EXPORT char
+_npy_smallest_type_of_kind_table[NPY_NSCALARKINDS];
+NPY_NO_EXPORT char
+_npy_next_larger_type_table[NPY_NTYPES];
#endif
NPY_NO_EXPORT void
diff --git a/numpy/core/src/umath/ufunc_object.c b/numpy/core/src/umath/ufunc_object.c
index 39b04db73..d2d57d125 100644
--- a/numpy/core/src/umath/ufunc_object.c
+++ b/numpy/core/src/umath/ufunc_object.c
@@ -3500,6 +3500,187 @@ _find_array_wrap(PyObject *args, PyObject **output_wrap, int nin, int nout)
}
static PyObject *
+ufunc_generic_call_iter(PyUFuncObject *self, PyObject *args, PyObject *kwds)
+{
+ npy_intp nargs, nin = self->nin, nout = self->nout;
+ npy_intp i, niter = nin + nout;
+ PyObject *obj, *context;
+ char *ufunc_name;
+ /* This contains all the inputs and outputs */
+ PyArrayObject *op[NPY_MAXARGS];
+ PyArray_Descr *dtype[NPY_MAXARGS];
+
+ /* TODO: For 1.6, the default should probably be NPY_CORDER */
+ NPY_ORDER order = NPY_KEEPORDER;
+ NPY_CASTING casting = NPY_SAFE_CASTING;
+ PyObject *extobj = NULL, *typetup = NULL;
+
+ ufunc_name = self->name ? self->name : "";
+
+ /* Check number of arguments */
+ nargs = PyTuple_Size(args);
+ if ((nargs < self->nin) || (nargs > self->nargs)) {
+ PyErr_SetString(PyExc_ValueError, "invalid number of arguments");
+ return NULL;
+ }
+
+ if (self->core_enabled) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "core_enabled (generalized ufunc) not supported yet");
+ return NULL;
+ }
+
+ /* Initialize all the operands and dtypes to NULL */
+ for (i = 0; i < niter; ++i) {
+ op[i] = NULL;
+ dtype[i] = NULL;
+ }
+
+ /* Get input arguments */
+ for(i = 0; i < nin; ++i) {
+ obj = PyTuple_GET_ITEM(args, i);
+ if (!PyArray_Check(obj) && !PyArray_IsScalar(obj, Generic)) {
+ /*
+ * TODO: There should be a comment here explaining what
+ * context does.
+ */
+ context = Py_BuildValue("OOi", self, args, i);
+ if (context == NULL) {
+ return NULL;
+ }
+ }
+ else {
+ context = NULL;
+ }
+ op[i] = (PyArrayObject *)PyArray_FromAny(obj, NULL, 0, 0, 0, context);
+
+ /* Start with a native byte-order data type */
+ if (PyArray_ISNBO(PyArray_DESCR(op[i])->byteorder)) {
+ dtype[i] = PyArray_DESCR(op[i]);
+ Py_INCREF(dtype[i]);
+ }
+ else {
+ dtype[i] = PyArray_DescrNewByteorder(PyArray_DESCR(op[i]),
+ NPY_NATIVE);
+ }
+ }
+
+ /* Get positional output arguments */
+ for (i = nin; i < nargs; ++i) {
+ obj = PyTuple_GET_ITEM(args, i);
+ /* Translate None to NULL */
+ if (obj == Py_None) {
+ continue;
+ }
+ /* If it's an array, can use it */
+ if (PyArray_Check(obj)) {
+ if (!PyArray_ISWRITEABLE(obj)) {
+ PyErr_SetString(PyExc_ValueError,
+ "return array is not writeable");
+ goto fail;
+ }
+ Py_INCREF(obj);
+ op[i] = (PyArrayObject *)obj;
+ }
+ else {
+ PyErr_SetString(PyExc_TypeError,
+ "return arrays must be "
+ "of ArrayType");
+ goto fail;
+ }
+ }
+
+ /* Get keyword output and other arguments.
+ * Raise an error if anything else is present in the
+ * keyword dictionary.
+ */
+ if (kwds != NULL) {
+ PyObject *key, *value;
+ Py_ssize_t pos = 0;
+ while (PyDict_Next(kwds, &pos, &key, &value)) {
+ Py_ssize_t length = 0;
+ char *str = NULL;
+ int bad_arg = 1;
+
+ if (PyString_AsStringAndSize(key, &str, &length) == -1) {
+ PyErr_SetString(PyExc_TypeError, "invalid keyword argument");
+ goto fail;
+ }
+
+ switch (str[0]) {
+ case 'c':
+ if (strncmp(str,"casting",7) == 0) {
+ if (!PyArray_CastingConverter(value, &casting)) {
+ goto fail;
+ }
+ bad_arg = 0;
+ }
+ break;
+ case 'e':
+ if (strncmp(str,"extobj",6) == 0) {
+ extobj = value;
+ bad_arg = 0;
+ }
+ break;
+ case 'o':
+ if (strncmp(str,"out",3) == 0) {
+ if (op[nin] != NULL) {
+ PyErr_SetString(PyExc_ValueError,
+ "cannot specify 'out' as both a positional "
+ "and keyword argument");
+ goto fail;
+ }
+ else {
+ if (PyArray_Check(value)) {
+ if (!PyArray_ISWRITEABLE(value)) {
+ PyErr_SetString(PyExc_ValueError,
+ "return array is not writeable");
+ goto fail;
+ }
+ Py_INCREF(value);
+ op[nin] = (PyArrayObject *)value;
+ }
+ else {
+ PyErr_SetString(PyExc_TypeError,
+ "return arrays must be "
+ "of ArrayType");
+ goto fail;
+ }
+ }
+ }
+ else if (strncmp(str,"order",5) == 0) {
+ if (!PyArray_OrderConverter(value, &order)) {
+ goto fail;
+ }
+ bad_arg = 0;
+ }
+ break;
+ case 's':
+ if (strncmp(str,"sig",3) == 0) {
+ typetup = value;
+ bad_arg = 0;
+ }
+ break;
+ }
+
+ if (bad_arg) {
+ char *format = "'%s' is an invalid keyword to %s";
+ PyErr_Format(PyExc_TypeError, format, str, ufunc_name);
+ goto fail;
+ }
+ }
+ }
+
+ return NULL;
+fail:
+ for (i = 0; i < niter; ++i) {
+ Py_XDECREF(op[i]);
+ Py_XDECREF(dtype[i]);
+ }
+ return NULL;
+}
+
+static PyObject *
ufunc_generic_call(PyUFuncObject *self, PyObject *args, PyObject *kwds)
{
int i;
@@ -4192,6 +4373,9 @@ static struct PyMethodDef ufunc_methods[] = {
{"outer",
(PyCFunction)ufunc_outer,
METH_VARARGS | METH_KEYWORDS, NULL},
+ {"gennew", /* new generic call */
+ (PyCFunction)ufunc_generic_call_iter,
+ METH_VARARGS | METH_KEYWORDS, NULL},
{NULL, NULL, 0, NULL} /* sentinel */
};
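
The keyword parsing in ufunc_generic_call_iter above ('out', 'order', 'casting',
'sig', 'extobj') previews the call signature ufuncs expose once the nditer-based
machinery is finished; the temporary "gennew" method is only a development hook.
A sketch of that intended interface, assuming a NumPy release where the
conversion is complete (1.6 or later):

    import numpy as np

    a = np.arange(3, dtype=np.int32)
    b = np.arange(3, dtype=np.float32)

    out = np.empty(3, dtype=np.float64)
    # 'casting' bounds the implicit casts and 'order' controls the memory
    # layout of newly allocated outputs ('K' follows the inputs' layout).
    np.add(a, b, out=out, casting='safe', order='K')

    # 'signature' (parsed above under the short name 'sig') picks a specific
    # registered inner loop by its type characters.
    np.add(a, b, signature='dd->d')
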
diff --git a/numpy/testing/print_coercion_tables.py b/numpy/testing/print_coercion_tables.py
index 0c8a87d9a..7b5320d7e 100755
--- a/numpy/testing/print_coercion_tables.py
+++ b/numpy/testing/print_coercion_tables.py
@@ -14,6 +14,8 @@ class GenericObject:
def __radd__(self, other):
return self
+ dtype = np.dtype('O')
+
def print_cancast_table(ntypes):
print 'X',
for char in ntypes: print char,
@@ -24,7 +26,7 @@ def print_cancast_table(ntypes):
print int(np.can_cast(row, col)),
print
-def print_coercion_table(ntypes, inputfirstvalue, inputsecondvalue, firstarray):
+def print_coercion_table(ntypes, inputfirstvalue, inputsecondvalue, firstarray, use_promote_types=False):
print '+',
for char in ntypes: print char,
print
@@ -46,11 +48,14 @@ def print_coercion_table(ntypes, inputfirstvalue, inputsecondvalue, firstarray):
else:
rowvalue = rowtype(inputfirstvalue)
colvalue = coltype(inputsecondvalue)
- value = np.add(rowvalue,colvalue)
- if isinstance(value, np.ndarray):
- char = value.dtype.char
+ if use_promote_types:
+ char = np.promote_types(rowvalue.dtype, colvalue.dtype).char
else:
- char = np.dtype(type(value)).char
+ value = np.add(rowvalue,colvalue)
+ if isinstance(value, np.ndarray):
+ char = value.dtype.char
+ else:
+ char = np.dtype(type(value)).char
except ValueError:
char = '!'
except OverflowError:
@@ -76,4 +81,6 @@ print_coercion_table(np.typecodes['All'], 0, 0, True)
print
print "array + neg scalar"
print_coercion_table(np.typecodes['All'], 0, -1, True)
-
+print
+print "promote_types"
+print_coercion_table(np.typecodes['All'], 0, 0, False, True)
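
For reference, one cell of the new "promote_types" table can be reproduced by
hand: for row type 'b' (int8) against column type 'H' (uint16) the printed
character should be 'i', since int32 is the smallest type that holds both:

    >>> np.promote_types(np.dtype('b'), np.dtype('H')).char
    'i'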