author     Eric Wieser <wieser.eric@gmail.com>   2019-07-06 16:08:17 -0700
committer  Matti Picus <matti.picus@gmail.com>   2019-07-06 16:08:17 -0700
commit     7ac7fa9a4621f7392f534b20f0cdd64967e9c7eb (patch)
tree       7c277762efd84f59d11e07720ad2995d361ff308
parent     57b16ed568581c33d0a6b6d3d4254dce3b67b321 (diff)
download   numpy-7ac7fa9a4621f7392f534b20f0cdd64967e9c7eb.tar.gz
MAINT/BUG/DOC: Fix errors in _add_newdocs (#13876)
* BUG: Remove items from `multiarray.__all__` which do not exist on Python 3.
  Avoid using `_add_newdocs` if these functions do not exist. The version check
  is left in place so that this can be backported to 1.16.
* BUG: Add missing `np.core.multiarray._get_ndarray_c_version` function.
  This must have been lost in the multiarray / umath merge. Found by noticing
  that `add_newdocs` was being called on an object that does not exist.
* DOC: Remove documentation for a property that does not exist.
  `ndarray._as_parameter_` is not a real thing.
* DOC: Remove docstrings which are duplicated from `numpy/core/multiarray.py`.
* BUG: Don't silence errors in `add_newdoc` caused by bad arguments.
  This caught us trying to document members that don't exist.
-rw-r--r--  numpy/core/_add_newdocs.py   | 242
-rw-r--r--  numpy/core/function_base.py  |  29
-rw-r--r--  numpy/core/multiarray.py     |   9
3 files changed, 64 insertions(+), 216 deletions(-)
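The core behavioural change described in the last bullet is in `add_newdoc`: looking up the object being documented is no longer wrapped in a blanket try/except, so a missing member fails loudly, while a docstring slot that cannot be written is still ignored. A minimal sketch of the distinction, assuming a NumPy build with this patch applied (the second member name is made up for illustration):

    from numpy.core.function_base import add_newdoc

    # Still silent: `ndarray.transpose` exists but already carries a read-only
    # docstring, so the failed write inside add_docstring is swallowed.
    add_newdoc('numpy.core.multiarray', 'ndarray', ('transpose', "ignored text"))

    # Now raises AttributeError: `ndarray.no_such_method` (hypothetical name)
    # does not exist, so the getattr lookup fails before any write is attempted.
    add_newdoc('numpy.core.multiarray', 'ndarray', ('no_such_method', "never used"))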
diff --git a/numpy/core/_add_newdocs.py b/numpy/core/_add_newdocs.py
index 84339ef23..02700c8ca 100644
--- a/numpy/core/_add_newdocs.py
+++ b/numpy/core/_add_newdocs.py
@@ -10,6 +10,8 @@ NOTE: Many of the methods of ndarray have corresponding functions.
"""
from __future__ import division, absolute_import, print_function
+import sys
+
from numpy.core import numerictypes as _numerictypes
from numpy.core import dtype
from numpy.core.function_base import add_newdoc
@@ -1471,57 +1473,58 @@ add_newdoc('numpy.core.multiarray', 'promote_types',
""")
-add_newdoc('numpy.core.multiarray', 'newbuffer',
- """
- newbuffer(size)
+if sys.version_info.major < 3:
+ add_newdoc('numpy.core.multiarray', 'newbuffer',
+ """
+ newbuffer(size)
- Return a new uninitialized buffer object.
+ Return a new uninitialized buffer object.
- Parameters
- ----------
- size : int
- Size in bytes of returned buffer object.
+ Parameters
+ ----------
+ size : int
+ Size in bytes of returned buffer object.
- Returns
- -------
- newbuffer : buffer object
- Returned, uninitialized buffer object of `size` bytes.
+ Returns
+ -------
+ newbuffer : buffer object
+ Returned, uninitialized buffer object of `size` bytes.
- """)
+ """)
-add_newdoc('numpy.core.multiarray', 'getbuffer',
- """
- getbuffer(obj [,offset[, size]])
+ add_newdoc('numpy.core.multiarray', 'getbuffer',
+ """
+ getbuffer(obj [,offset[, size]])
- Create a buffer object from the given object referencing a slice of
- length size starting at offset.
+ Create a buffer object from the given object referencing a slice of
+ length size starting at offset.
- Default is the entire buffer. A read-write buffer is attempted followed
- by a read-only buffer.
+ Default is the entire buffer. A read-write buffer is attempted followed
+ by a read-only buffer.
- Parameters
- ----------
- obj : object
+ Parameters
+ ----------
+ obj : object
- offset : int, optional
+ offset : int, optional
- size : int, optional
+ size : int, optional
- Returns
- -------
- buffer_obj : buffer
+ Returns
+ -------
+ buffer_obj : buffer
- Examples
- --------
- >>> buf = np.getbuffer(np.ones(5), 1, 3)
- >>> len(buf)
- 3
- >>> buf[0]
- '\\x00'
- >>> buf
- <read-write buffer for 0x8af1e70, size 3, offset 1 at 0x8ba4ec0>
+ Examples
+ --------
+ >>> buf = np.getbuffer(np.ones(5), 1, 3)
+ >>> len(buf)
+ 3
+ >>> buf[0]
+ '\\x00'
+ >>> buf
+ <read-write buffer for 0x8af1e70, size 3, offset 1 at 0x8ba4ec0>
- """)
+ """)
add_newdoc('numpy.core.multiarray', 'c_einsum',
"""
@@ -1987,13 +1990,6 @@ add_newdoc('numpy.core.multiarray', 'ndarray', ('__array_struct__',
"""Array protocol: C-struct side."""))
-add_newdoc('numpy.core.multiarray', 'ndarray', ('_as_parameter_',
- """Allow the array to be interpreted as a ctypes object by returning the
- data-memory location as an integer
-
- """))
-
-
add_newdoc('numpy.core.multiarray', 'ndarray', ('base',
"""
Base object if memory is from some other object.
@@ -3255,87 +3251,6 @@ add_newdoc('numpy.core.multiarray', 'ndarray', ('min',
"""))
-add_newdoc('numpy.core.multiarray', 'shares_memory',
- """
- shares_memory(a, b, max_work=None)
-
- Determine if two arrays share memory
-
- Parameters
- ----------
- a, b : ndarray
- Input arrays
- max_work : int, optional
- Effort to spend on solving the overlap problem (maximum number
- of candidate solutions to consider). The following special
- values are recognized:
-
- max_work=MAY_SHARE_EXACT (default)
- The problem is solved exactly. In this case, the function returns
- True only if there is an element shared between the arrays.
- max_work=MAY_SHARE_BOUNDS
- Only the memory bounds of a and b are checked.
-
- Raises
- ------
- numpy.TooHardError
- Exceeded max_work.
-
- Returns
- -------
- out : bool
-
- See Also
- --------
- may_share_memory
-
- Examples
- --------
- >>> np.may_share_memory(np.array([1,2]), np.array([5,8,9]))
- False
-
- """)
-
-
-add_newdoc('numpy.core.multiarray', 'may_share_memory',
- """
- may_share_memory(a, b, max_work=None)
-
- Determine if two arrays might share memory
-
- A return of True does not necessarily mean that the two arrays
- share any element. It just means that they *might*.
-
- Only the memory bounds of a and b are checked by default.
-
- Parameters
- ----------
- a, b : ndarray
- Input arrays
- max_work : int, optional
- Effort to spend on solving the overlap problem. See
- `shares_memory` for details. Default for ``may_share_memory``
- is to do a bounds check.
-
- Returns
- -------
- out : bool
-
- See Also
- --------
- shares_memory
-
- Examples
- --------
- >>> np.may_share_memory(np.array([1,2]), np.array([5,8,9]))
- False
- >>> x = np.zeros([3, 4])
- >>> np.may_share_memory(x[:,0], x[:,1])
- True
-
- """)
-
-
add_newdoc('numpy.core.multiarray', 'ndarray', ('newbyteorder',
"""
arr.newbyteorder(new_order='S')
@@ -3437,81 +3352,6 @@ add_newdoc('numpy.core.multiarray', 'ndarray', ('put',
"""))
-add_newdoc('numpy.core.multiarray', 'copyto',
- """
- copyto(dst, src, casting='same_kind', where=True)
-
- Copies values from one array to another, broadcasting as necessary.
-
- Raises a TypeError if the `casting` rule is violated, and if
- `where` is provided, it selects which elements to copy.
-
- .. versionadded:: 1.7.0
-
- Parameters
- ----------
- dst : ndarray
- The array into which values are copied.
- src : array_like
- The array from which values are copied.
- casting : {'no', 'equiv', 'safe', 'same_kind', 'unsafe'}, optional
- Controls what kind of data casting may occur when copying.
-
- * 'no' means the data types should not be cast at all.
- * 'equiv' means only byte-order changes are allowed.
- * 'safe' means only casts which can preserve values are allowed.
- * 'same_kind' means only safe casts or casts within a kind,
- like float64 to float32, are allowed.
- * 'unsafe' means any data conversions may be done.
- where : array_like of bool, optional
- A boolean array which is broadcasted to match the dimensions
- of `dst`, and selects elements to copy from `src` to `dst`
- wherever it contains the value True.
-
- """)
-
-add_newdoc('numpy.core.multiarray', 'putmask',
- """
- putmask(a, mask, values)
-
- Changes elements of an array based on conditional and input values.
-
- Sets ``a.flat[n] = values[n]`` for each n where ``mask.flat[n]==True``.
-
- If `values` is not the same size as `a` and `mask` then it will repeat.
- This gives behavior different from ``a[mask] = values``.
-
- Parameters
- ----------
- a : array_like
- Target array.
- mask : array_like
- Boolean mask array. It has to be the same shape as `a`.
- values : array_like
- Values to put into `a` where `mask` is True. If `values` is smaller
- than `a` it will be repeated.
-
- See Also
- --------
- place, put, take, copyto
-
- Examples
- --------
- >>> x = np.arange(6).reshape(2, 3)
- >>> np.putmask(x, x>2, x**2)
- >>> x
- array([[ 0, 1, 2],
- [ 9, 16, 25]])
-
- If `values` is smaller than `a` it is repeated:
-
- >>> x = np.arange(5)
- >>> np.putmask(x, x>1, [-33, -44])
- >>> x
- array([ 0, 1, -33, -44, -33])
-
- """)
-
add_newdoc('numpy.core.multiarray', 'ndarray', ('ravel',
"""
diff --git a/numpy/core/function_base.py b/numpy/core/function_base.py
index 296213823..c1067299d 100644
--- a/numpy/core/function_base.py
+++ b/numpy/core/function_base.py
@@ -431,6 +431,13 @@ def geomspace(start, stop, num=50, endpoint=True, dtype=None, axis=0):
#always succeed
+def _add_docstring(obj, doc):
+ try:
+ add_docstring(obj, doc)
+ except Exception:
+ pass
+
+
def add_newdoc(place, obj, doc):
"""
Adds documentation to obj which is in module place.
@@ -445,21 +452,19 @@ def add_newdoc(place, obj, doc):
sequence of length two --> [(method1, docstring1),
(method2, docstring2), ...]
- This routine never raises an error.
+ This routine never raises an error if the docstring can't be written, but
+ will raise an error if the object being documented does not exist.
This routine cannot modify read-only docstrings, as appear
in new-style classes or built-in functions. Because this
routine never raises an error the caller must check manually
that the docstrings were changed.
"""
- try:
- new = getattr(__import__(place, globals(), {}, [obj]), obj)
- if isinstance(doc, str):
- add_docstring(new, doc.strip())
- elif isinstance(doc, tuple):
- add_docstring(getattr(new, doc[0]), doc[1].strip())
- elif isinstance(doc, list):
- for val in doc:
- add_docstring(getattr(new, val[0]), val[1].strip())
- except Exception:
- pass
+ new = getattr(__import__(place, globals(), {}, [obj]), obj)
+ if isinstance(doc, str):
+ _add_docstring(new, doc.strip())
+ elif isinstance(doc, tuple):
+ _add_docstring(getattr(new, doc[0]), doc[1].strip())
+ elif isinstance(doc, list):
+ for val in doc:
+ _add_docstring(getattr(new, val[0]), val[1].strip())
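For reference, `add_newdoc` accepts the three `doc` forms spelled out in its docstring: a plain string, a `(name, docstring)` pair, or a list of such pairs. A usage sketch using real `flatiter` attributes; the docstring texts here are placeholders only:

    from numpy.core.function_base import add_newdoc

    # A plain string documents the object itself.
    add_newdoc('numpy.core.multiarray', 'flatiter',
               "Flat iterator object to iterate over arrays.")

    # A (name, docstring) pair documents a single attribute or method.
    add_newdoc('numpy.core.multiarray', 'flatiter',
               ('base', "A reference to the array that is iterated over."))

    # A list of pairs documents several members in one call.
    add_newdoc('numpy.core.multiarray', 'flatiter', [
        ('coords', "An N-dimensional tuple of current coordinates."),
        ('index', "Current flat index into the array."),
    ])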
diff --git a/numpy/core/multiarray.py b/numpy/core/multiarray.py
index 4f2c5b78e..c0fcc10ff 100644
--- a/numpy/core/multiarray.py
+++ b/numpy/core/multiarray.py
@@ -7,6 +7,7 @@ by importing from the extension module.
"""
import functools
+import sys
import warnings
import sys
@@ -16,7 +17,7 @@ import numpy as np
from numpy.core._multiarray_umath import *
from numpy.core._multiarray_umath import (
_fastCopyAndTranspose, _flagdict, _insert, _reconstruct, _vec_string,
- _ARRAY_API, _monotonicity
+ _ARRAY_API, _monotonicity, _get_ndarray_c_version
)
__all__ = [
@@ -31,15 +32,17 @@ __all__ = [
'count_nonzero', 'c_einsum', 'datetime_as_string', 'datetime_data',
'digitize', 'dot', 'dragon4_positional', 'dragon4_scientific', 'dtype',
'empty', 'empty_like', 'error', 'flagsobj', 'flatiter', 'format_longfloat',
- 'frombuffer', 'fromfile', 'fromiter', 'fromstring', 'getbuffer', 'inner',
+ 'frombuffer', 'fromfile', 'fromiter', 'fromstring', 'inner',
'int_asbuffer', 'interp', 'interp_complex', 'is_busday', 'lexsort',
'matmul', 'may_share_memory', 'min_scalar_type', 'ndarray', 'nditer',
- 'nested_iters', 'newbuffer', 'normalize_axis_index', 'packbits',
+ 'nested_iters', 'normalize_axis_index', 'packbits',
'promote_types', 'putmask', 'ravel_multi_index', 'result_type', 'scalar',
'set_datetimeparse_function', 'set_legacy_print_mode', 'set_numeric_ops',
'set_string_function', 'set_typeDict', 'shares_memory', 'test_interrupt',
'tracemalloc_domain', 'typeinfo', 'unpackbits', 'unravel_index', 'vdot',
'where', 'zeros']
+if sys.version_info.major < 3:
+ __all__ += ['newbuffer', 'getbuffer']
# For backward compatibility, make sure pickle imports these functions from here
_reconstruct.__module__ = 'numpy.core.multiarray'
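Two smaller pieces of the multiarray.py hunk can be sanity-checked directly: `_get_ndarray_c_version` is re-exported from the C extension, and the Python-2-only buffer helpers are only advertised in `__all__` where they exist. A small check, assuming an interpreter running this patched NumPy:

    import sys
    from numpy.core import multiarray

    # The C-API version helper re-exported above is reachable again from the
    # backwards-compatibility namespace.
    assert isinstance(multiarray._get_ndarray_c_version(), int)

    # newbuffer/getbuffer are only listed in __all__ on Python 2.
    on_python2 = sys.version_info.major < 3
    assert ('newbuffer' in multiarray.__all__) == on_python2
    assert ('getbuffer' in multiarray.__all__) == on_python2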