author     Eric Wieser <wieser.eric@gmail.com>    2019-07-08 20:36:12 -0700
committer  GitHub <noreply@github.com>            2019-07-08 20:36:12 -0700
commit     42a086d93a256bd8df371d7f62a5b23592a25f60 (patch)
tree       96a1282c56f27ddc2703bec86014d96e08d66c39
parent     af5a108475fe1969f2b5ee2e0ec81ddc278f6437 (diff)
download   numpy-revert-13876-forbid-_add_newdocs-python.tar.gz

Revert "MAINT/BUG/DOC: Fix errors in _add_newdocs (#13876)"    (branch: revert-13876-forbid-_add_newdocs-python)
This reverts commit 7ac7fa9a4621f7392f534b20f0cdd64967e9c7eb.
-rw-r--r--  numpy/core/_add_newdocs.py   | 242
-rw-r--r--  numpy/core/function_base.py  |  29
-rw-r--r--  numpy/core/multiarray.py     |   9
3 files changed, 216 insertions, 64 deletions
diff --git a/numpy/core/_add_newdocs.py b/numpy/core/_add_newdocs.py
index bb0c77471..b01d5dfa8 100644
--- a/numpy/core/_add_newdocs.py
+++ b/numpy/core/_add_newdocs.py
@@ -10,8 +10,6 @@ NOTE: Many of the methods of ndarray have corresponding functions.
"""
from __future__ import division, absolute_import, print_function
-import sys
-
from numpy.core import numerictypes as _numerictypes
from numpy.core import dtype
from numpy.core.function_base import add_newdoc
@@ -1473,58 +1471,57 @@ add_newdoc('numpy.core.multiarray', 'promote_types',
""")
-if sys.version_info.major < 3:
- add_newdoc('numpy.core.multiarray', 'newbuffer',
- """
- newbuffer(size)
+add_newdoc('numpy.core.multiarray', 'newbuffer',
+ """
+ newbuffer(size)
- Return a new uninitialized buffer object.
+ Return a new uninitialized buffer object.
- Parameters
- ----------
- size : int
- Size in bytes of returned buffer object.
+ Parameters
+ ----------
+ size : int
+ Size in bytes of returned buffer object.
- Returns
- -------
- newbuffer : buffer object
- Returned, uninitialized buffer object of `size` bytes.
+ Returns
+ -------
+ newbuffer : buffer object
+ Returned, uninitialized buffer object of `size` bytes.
- """)
+ """)
- add_newdoc('numpy.core.multiarray', 'getbuffer',
- """
- getbuffer(obj [,offset[, size]])
+add_newdoc('numpy.core.multiarray', 'getbuffer',
+ """
+ getbuffer(obj [,offset[, size]])
- Create a buffer object from the given object referencing a slice of
- length size starting at offset.
+ Create a buffer object from the given object referencing a slice of
+ length size starting at offset.
- Default is the entire buffer. A read-write buffer is attempted followed
- by a read-only buffer.
+ Default is the entire buffer. A read-write buffer is attempted followed
+ by a read-only buffer.
- Parameters
- ----------
- obj : object
+ Parameters
+ ----------
+ obj : object
- offset : int, optional
+ offset : int, optional
- size : int, optional
+ size : int, optional
- Returns
- -------
- buffer_obj : buffer
+ Returns
+ -------
+ buffer_obj : buffer
- Examples
- --------
- >>> buf = np.getbuffer(np.ones(5), 1, 3)
- >>> len(buf)
- 3
- >>> buf[0]
- '\\x00'
- >>> buf
- <read-write buffer for 0x8af1e70, size 3, offset 1 at 0x8ba4ec0>
+ Examples
+ --------
+ >>> buf = np.getbuffer(np.ones(5), 1, 3)
+ >>> len(buf)
+ 3
+ >>> buf[0]
+ '\\x00'
+ >>> buf
+ <read-write buffer for 0x8af1e70, size 3, offset 1 at 0x8ba4ec0>
- """)
+ """)
add_newdoc('numpy.core.multiarray', 'c_einsum',
"""
@@ -1990,6 +1987,13 @@ add_newdoc('numpy.core.multiarray', 'ndarray', ('__array_struct__',
"""Array protocol: C-struct side."""))
+add_newdoc('numpy.core.multiarray', 'ndarray', ('_as_parameter_',
+ """Allow the array to be interpreted as a ctypes object by returning the
+ data-memory location as an integer
+
+ """))
+
+
add_newdoc('numpy.core.multiarray', 'ndarray', ('base',
"""
Base object if memory is from some other object.
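The ``_as_parameter_`` docstring restored above concerns ctypes interop: the
array's data address can be handed to a foreign function. A hedged sketch of
the idea, here going through the ``ctypes`` attribute's ``_as_parameter_`` and
reading the buffer back via a cast pointer:

    >>> import ctypes
    >>> import numpy as np
    >>> a = np.arange(3.0)
    >>> ptr = ctypes.cast(a.ctypes._as_parameter_, ctypes.POINTER(ctypes.c_double))
    >>> ptr[0], ptr[2]                 # reads straight from the array's memory
    (0.0, 2.0)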
@@ -3251,6 +3255,87 @@ add_newdoc('numpy.core.multiarray', 'ndarray', ('min',
"""))
+add_newdoc('numpy.core.multiarray', 'shares_memory',
+ """
+ shares_memory(a, b, max_work=None)
+
+ Determine if two arrays share memory
+
+ Parameters
+ ----------
+ a, b : ndarray
+ Input arrays
+ max_work : int, optional
+ Effort to spend on solving the overlap problem (maximum number
+ of candidate solutions to consider). The following special
+ values are recognized:
+
+ max_work=MAY_SHARE_EXACT (default)
+ The problem is solved exactly. In this case, the function returns
+ True only if there is an element shared between the arrays.
+ max_work=MAY_SHARE_BOUNDS
+ Only the memory bounds of a and b are checked.
+
+ Raises
+ ------
+ numpy.TooHardError
+ Exceeded max_work.
+
+ Returns
+ -------
+ out : bool
+
+ See Also
+ --------
+ may_share_memory
+
+ Examples
+ --------
+ >>> np.may_share_memory(np.array([1,2]), np.array([5,8,9]))
+ False
+
+ """)
+
+
+add_newdoc('numpy.core.multiarray', 'may_share_memory',
+ """
+ may_share_memory(a, b, max_work=None)
+
+ Determine if two arrays might share memory
+
+ A return of True does not necessarily mean that the two arrays
+ share any element. It just means that they *might*.
+
+ Only the memory bounds of a and b are checked by default.
+
+ Parameters
+ ----------
+ a, b : ndarray
+ Input arrays
+ max_work : int, optional
+ Effort to spend on solving the overlap problem. See
+ `shares_memory` for details. Default for ``may_share_memory``
+ is to do a bounds check.
+
+ Returns
+ -------
+ out : bool
+
+ See Also
+ --------
+ shares_memory
+
+ Examples
+ --------
+ >>> np.may_share_memory(np.array([1,2]), np.array([5,8,9]))
+ False
+ >>> x = np.zeros([3, 4])
+ >>> np.may_share_memory(x[:,0], x[:,1])
+ True
+
+ """)
+
+
add_newdoc('numpy.core.multiarray', 'ndarray', ('newbyteorder',
"""
arr.newbyteorder(new_order='S')
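The ``shares_memory`` and ``may_share_memory`` docstrings restored above
describe an exact element-overlap check versus a cheap bounds check. A short
sketch of the difference on overlapping slices of one buffer:

    >>> import numpy as np
    >>> x = np.zeros(8)
    >>> np.shares_memory(x[:4], x[3:])        # element x[3] is actually shared
    True
    >>> np.may_share_memory(x[::2], x[1::2])  # bounds overlap, so "might"
    True
    >>> np.shares_memory(x[::2], x[1::2])     # but no element is shared
    False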
@@ -3352,6 +3437,81 @@ add_newdoc('numpy.core.multiarray', 'ndarray', ('put',
"""))
+add_newdoc('numpy.core.multiarray', 'copyto',
+ """
+ copyto(dst, src, casting='same_kind', where=True)
+
+ Copies values from one array to another, broadcasting as necessary.
+
+ Raises a TypeError if the `casting` rule is violated, and if
+ `where` is provided, it selects which elements to copy.
+
+ .. versionadded:: 1.7.0
+
+ Parameters
+ ----------
+ dst : ndarray
+ The array into which values are copied.
+ src : array_like
+ The array from which values are copied.
+ casting : {'no', 'equiv', 'safe', 'same_kind', 'unsafe'}, optional
+ Controls what kind of data casting may occur when copying.
+
+ * 'no' means the data types should not be cast at all.
+ * 'equiv' means only byte-order changes are allowed.
+ * 'safe' means only casts which can preserve values are allowed.
+ * 'same_kind' means only safe casts or casts within a kind,
+ like float64 to float32, are allowed.
+ * 'unsafe' means any data conversions may be done.
+ where : array_like of bool, optional
+ A boolean array which is broadcasted to match the dimensions
+ of `dst`, and selects elements to copy from `src` to `dst`
+ wherever it contains the value True.
+
+ """)
+
+add_newdoc('numpy.core.multiarray', 'putmask',
+ """
+ putmask(a, mask, values)
+
+ Changes elements of an array based on conditional and input values.
+
+ Sets ``a.flat[n] = values[n]`` for each n where ``mask.flat[n]==True``.
+
+ If `values` is not the same size as `a` and `mask` then it will repeat.
+ This gives behavior different from ``a[mask] = values``.
+
+ Parameters
+ ----------
+ a : array_like
+ Target array.
+ mask : array_like
+ Boolean mask array. It has to be the same shape as `a`.
+ values : array_like
+ Values to put into `a` where `mask` is True. If `values` is smaller
+ than `a` it will be repeated.
+
+ See Also
+ --------
+ place, put, take, copyto
+
+ Examples
+ --------
+ >>> x = np.arange(6).reshape(2, 3)
+ >>> np.putmask(x, x>2, x**2)
+ >>> x
+ array([[ 0, 1, 2],
+ [ 9, 16, 25]])
+
+ If `values` is smaller than `a` it is repeated:
+
+ >>> x = np.arange(5)
+ >>> np.putmask(x, x>1, [-33, -44])
+ >>> x
+ array([ 0, 1, -33, -44, -33])
+
+ """)
+
add_newdoc('numpy.core.multiarray', 'ndarray', ('ravel',
"""
diff --git a/numpy/core/function_base.py b/numpy/core/function_base.py
index c1067299d..296213823 100644
--- a/numpy/core/function_base.py
+++ b/numpy/core/function_base.py
@@ -431,13 +431,6 @@ def geomspace(start, stop, num=50, endpoint=True, dtype=None, axis=0):
#always succeed
-def _add_docstring(obj, doc):
- try:
- add_docstring(obj, doc)
- except Exception:
- pass
-
-
def add_newdoc(place, obj, doc):
"""
Adds documentation to obj which is in module place.
@@ -452,19 +445,21 @@ def add_newdoc(place, obj, doc):
sequence of length two --> [(method1, docstring1),
(method2, docstring2), ...]
- This routine never raises an error if the docstring can't be written, but
- will raise an error if the object being documented does not exist.
+ This routine never raises an error.
This routine cannot modify read-only docstrings, as appear
in new-style classes or built-in functions. Because this
routine never raises an error the caller must check manually
that the docstrings were changed.
"""
- new = getattr(__import__(place, globals(), {}, [obj]), obj)
- if isinstance(doc, str):
- _add_docstring(new, doc.strip())
- elif isinstance(doc, tuple):
- _add_docstring(getattr(new, doc[0]), doc[1].strip())
- elif isinstance(doc, list):
- for val in doc:
- _add_docstring(getattr(new, val[0]), val[1].strip())
+ try:
+ new = getattr(__import__(place, globals(), {}, [obj]), obj)
+ if isinstance(doc, str):
+ add_docstring(new, doc.strip())
+ elif isinstance(doc, tuple):
+ add_docstring(getattr(new, doc[0]), doc[1].strip())
+ elif isinstance(doc, list):
+ for val in doc:
+ add_docstring(getattr(new, val[0]), val[1].strip())
+ except Exception:
+ pass
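With the try/except restored around the whole body, ``add_newdoc`` silently
ignores any failure, including a misspelled module or attribute name. A short
sketch (the attribute name below is hypothetical):

    >>> from numpy.core.function_base import add_newdoc
    >>> add_newdoc('numpy.core.multiarray', 'no_such_attribute', "never attached")
    >>> # no error is raised; the caller must check that the docstring was applied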
diff --git a/numpy/core/multiarray.py b/numpy/core/multiarray.py
index c0fcc10ff..4f2c5b78e 100644
--- a/numpy/core/multiarray.py
+++ b/numpy/core/multiarray.py
@@ -7,7 +7,6 @@ by importing from the extension module.
"""
import functools
-import sys
import warnings
import sys
@@ -17,7 +16,7 @@ import numpy as np
from numpy.core._multiarray_umath import *
from numpy.core._multiarray_umath import (
_fastCopyAndTranspose, _flagdict, _insert, _reconstruct, _vec_string,
- _ARRAY_API, _monotonicity, _get_ndarray_c_version
+ _ARRAY_API, _monotonicity
)
__all__ = [
@@ -32,17 +31,15 @@ __all__ = [
'count_nonzero', 'c_einsum', 'datetime_as_string', 'datetime_data',
'digitize', 'dot', 'dragon4_positional', 'dragon4_scientific', 'dtype',
'empty', 'empty_like', 'error', 'flagsobj', 'flatiter', 'format_longfloat',
- 'frombuffer', 'fromfile', 'fromiter', 'fromstring', 'inner',
+ 'frombuffer', 'fromfile', 'fromiter', 'fromstring', 'getbuffer', 'inner',
'int_asbuffer', 'interp', 'interp_complex', 'is_busday', 'lexsort',
'matmul', 'may_share_memory', 'min_scalar_type', 'ndarray', 'nditer',
- 'nested_iters', 'normalize_axis_index', 'packbits',
+ 'nested_iters', 'newbuffer', 'normalize_axis_index', 'packbits',
'promote_types', 'putmask', 'ravel_multi_index', 'result_type', 'scalar',
'set_datetimeparse_function', 'set_legacy_print_mode', 'set_numeric_ops',
'set_string_function', 'set_typeDict', 'shares_memory', 'test_interrupt',
'tracemalloc_domain', 'typeinfo', 'unpackbits', 'unravel_index', 'vdot',
'where', 'zeros']
-if sys.version_info.major < 3:
- __all__ += ['newbuffer', 'getbuffer']
# For backward compatibility, make sure pickle imports these functions from here
_reconstruct.__module__ = 'numpy.core.multiarray'
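The trailing context explains why ``_reconstruct`` keeps
``numpy.core.multiarray`` as its ``__module__``: existing pickles resolve the
helper under that name. A hedged sketch of the round-trip that depends on it:

    >>> import pickle
    >>> import numpy as np
    >>> a = np.arange(3)
    >>> b = pickle.loads(pickle.dumps(a))   # unpickling looks up numpy.core.multiarray._reconstruct
    >>> np.array_equal(a, b)
    True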