author     Charles Harris <charlesr.harris@gmail.com>  2017-09-21 13:33:25 -0500
committer  GitHub <noreply@github.com>                 2017-09-21 13:33:25 -0500
commit     92d08dbcc9f489e54aeab8525b4b30f7b5d01ecf (patch)
tree       fe1476d1a20a476b50a027c9ae5f4f6bf0e68055 /numpy/lib
parent     54232da2f1c1d65f01c747eb7d1dda8fd46d6a63 (diff)
parent     1f4ba5bee1240cc7a888087dc06acb7709f12870 (diff)
download   numpy-92d08dbcc9f489e54aeab8525b4b30f7b5d01ecf.tar.gz
Merge pull request #9408 from eric-wieser/gradient-fix
BUG: various fixes to np.gradient
Diffstat (limited to 'numpy/lib')
-rw-r--r--  numpy/lib/function_base.py             19
-rw-r--r--  numpy/lib/tests/test_function_base.py   9
2 files changed, 20 insertions, 8 deletions
diff --git a/numpy/lib/function_base.py b/numpy/lib/function_base.py
index 93cbd69dd..905e60512 100644
--- a/numpy/lib/function_base.py
+++ b/numpy/lib/function_base.py
@@ -1676,23 +1676,28 @@ def gradient(f, *varargs, **kwargs):
     len_axes = len(axes)
     n = len(varargs)
     if n == 0:
+        # no spacing argument - use 1 in all axes
         dx = [1.0] * len_axes
-    elif n == len_axes or (n == 1 and np.isscalar(varargs[0])):
+    elif n == 1 and np.ndim(varargs[0]) == 0:
+        # single scalar for all axes
+        dx = varargs * len_axes
+    elif n == len_axes:
+        # scalar or 1d array for each axis
         dx = list(varargs)
         for i, distances in enumerate(dx):
-            if np.isscalar(distances):
+            if np.ndim(distances) == 0:
                 continue
+            elif np.ndim(distances) != 1:
+                raise ValueError("distances must be either scalars or 1d")
             if len(distances) != f.shape[axes[i]]:
-                raise ValueError("distances must be either scalars or match "
+                raise ValueError("when 1d, distances must match "
                                  "the length of the corresponding dimension")
-            diffx = np.diff(dx[i])
+            diffx = np.diff(distances)
             # if distances are constant reduce to the scalar case
             # since it brings a consistent speedup
             if (diffx == diffx[0]).all():
                 diffx = diffx[0]
             dx[i] = diffx
-        if len(dx) == 1:
-            dx *= len_axes
     else:
         raise TypeError("invalid number of arguments")

@@ -1736,7 +1741,7 @@ def gradient(f, *varargs, **kwargs):
         # result allocation
         out = np.empty_like(f, dtype=otype)

-        uniform_spacing = np.isscalar(dx[i])
+        uniform_spacing = np.ndim(dx[i]) == 0

         # Numerical differentiation: 2nd order interior
         slice1[axis] = slice(1, -1)
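
For context (not part of the patch), here is a small usage sketch of the spacing-argument handling this hunk changes, run against a NumPy that includes this fix; the array names are illustrative:

    import numpy as np

    f = np.arange(5, dtype=float) ** 2

    # a single scalar spacing is applied to every axis
    print(np.gradient(f, 2.0))

    # a 0d array now also counts as a scalar, since the check is
    # np.ndim(...) == 0 rather than np.isscalar(...)
    print(np.gradient(f, np.array(2.0)))

    # a 1d coordinate array must match the length of the corresponding dimension
    x = np.array([0.0, 1.0, 1.5, 3.5, 4.0])
    print(np.gradient(f, x))

    # anything with more than one dimension is rejected
    try:
        np.gradient(f, np.ones((5, 2)))
    except ValueError as exc:
        print(exc)  # distances must be either scalars or 1d
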
diff --git a/numpy/lib/tests/test_function_base.py b/numpy/lib/tests/test_function_base.py
index 4c90abbf6..c64081088 100644
--- a/numpy/lib/tests/test_function_base.py
+++ b/numpy/lib/tests/test_function_base.py
@@ -804,8 +804,11 @@ class TestGradient(object):
 
         # distances must be scalars or have size equal to gradient[axis]
         gradient(np.arange(5), 3.)
+        gradient(np.arange(5), np.array(3.))
         gradient(np.arange(5), dx)
-        gradient(f_2d, 1.5)  # dy is set equal to dx because scalar
+        # dy is set equal to dx because scalar
+        gradient(f_2d, 1.5)
+        gradient(f_2d, np.array(1.5))
         gradient(f_2d, dx_uneven, dx_uneven)

         # mix between even and uneven spaces and
@@ -815,6 +818,10 @@ class TestGradient(object):
         # 2D but axis specified
         gradient(f_2d, dx, axis=1)

+        # 2d coordinate arguments are not yet allowed
+        assert_raises_regex(ValueError, '.*scalars or 1d',
+                            gradient, f_2d, np.stack([dx]*2, axis=-1), 1)
+
     def test_badargs(self):
         f_2d = np.arange(25).reshape(5, 5)
         x = np.cumsum(np.ones(5))
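
As a standalone mirror of the assertions added above (illustrative only, not part of the test suite; assert_raises_regex is taken from numpy.testing as in the existing tests), the new behaviour can be checked like this:

    import numpy as np
    from numpy.testing import assert_raises_regex

    f_2d = np.arange(25, dtype=float).reshape(5, 5)
    dx = np.cumsum(np.ones(5))

    # 0d array spacing is accepted just like a Python scalar
    np.gradient(f_2d, np.array(1.5))

    # 2d coordinate arrays are rejected with the message introduced in this patch
    assert_raises_regex(ValueError, '.*scalars or 1d',
                        np.gradient, f_2d, np.stack([dx]*2, axis=-1), 1)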