summaryrefslogtreecommitdiff
path: root/numpy
diff options
context:
space:
mode:
authorSebastian Berg <sebastian@sipsolutions.net>2021-12-07 14:55:59 -0600
committerSebastian Berg <sebastian@sipsolutions.net>2021-12-07 14:55:59 -0600
commitb60c577befc16177850c08030d79356710271789 (patch)
tree8bd2f22dd31d45d312285588ccdb9bd797eeeca9 /numpy
parente76052fc852e957f141c2c8090a59753f163a0b8 (diff)
downloadnumpy-b60c577befc16177850c08030d79356710271789.tar.gz
BUG: Fixup rare reducelike cast causing rejection of a valid loop
If byte-swapping occurred, this would lead to identity checks failing. Maybe it will be easier to just not be as strict in the reduce code, but for now fix-up the resolver to ensure identity for reducelike operations (when possible). Also fixes potential decref of uninitialized values.
Diffstat (limited to 'numpy')
-rw-r--r--numpy/core/src/umath/legacy_array_method.c34
-rw-r--r--numpy/core/tests/test_ufunc.py20
2 files changed, 52 insertions, 2 deletions
diff --git a/numpy/core/src/umath/legacy_array_method.c b/numpy/core/src/umath/legacy_array_method.c
index a423823d4..99de63aac 100644
--- a/numpy/core/src/umath/legacy_array_method.c
+++ b/numpy/core/src/umath/legacy_array_method.c
@@ -123,10 +123,40 @@ simple_legacy_resolve_descriptors(
PyArray_Descr **given_descrs,
PyArray_Descr **output_descrs)
{
+ int i = 0;
int nin = method->nin;
int nout = method->nout;
- for (int i = 0; i < nin + nout; i++) {
+ if (nin == 2 && nout == 1 && given_descrs[2] != NULL
+ && dtypes[0] == dtypes[2]) {
+ /*
+ * Could be a reduction, which requires `descr[0] is descr[2]`
+ * (identity) at least currently. This is because `op[0] is op[2]`.
+ * (If the output descriptor is not passed, the below works.)
+ */
+ output_descrs[2] = ensure_dtype_nbo(given_descrs[2]);
+ if (output_descrs[2] == NULL) {
+ Py_CLEAR(output_descrs[2]);
+ return -1;
+ }
+ Py_INCREF(output_descrs[2]);
+ output_descrs[0] = output_descrs[2];
+ if (dtypes[1] == dtypes[2]) {
+ /* Same for the second one (accumulation is stricter) */
+ Py_INCREF(output_descrs[2]);
+ output_descrs[1] = output_descrs[2];
+ }
+ else {
+ output_descrs[1] = ensure_dtype_nbo(given_descrs[1]);
+ if (output_descrs[1] == NULL) {
+ i = 2;
+ goto fail;
+ }
+ }
+ return NPY_NO_CASTING;
+ }
+
+ for (; i < nin + nout; i++) {
if (given_descrs[i] != NULL) {
output_descrs[i] = ensure_dtype_nbo(given_descrs[i]);
}
@@ -146,7 +176,7 @@ simple_legacy_resolve_descriptors(
return NPY_NO_CASTING;
fail:
- for (int i = 0; i < nin + nout; i++) {
+ for (; i >= 0; i--) {
Py_CLEAR(output_descrs[i]);
}
return -1;
diff --git a/numpy/core/tests/test_ufunc.py b/numpy/core/tests/test_ufunc.py
index 500904586..76e4cdcfd 100644
--- a/numpy/core/tests/test_ufunc.py
+++ b/numpy/core/tests/test_ufunc.py
@@ -2171,6 +2171,26 @@ class TestUfunc:
np.multiply.reduce(arr, out=single_res, dtype=np.float32)
assert single_res != res
+ def test_reducelike_output_needs_identical_cast(self):
+ # Checks that the case where we have a simple byte-swap works; mainly
+ # tests that this is not rejected directly.
+ # (interesting because we require descriptor identity in reducelikes).
+ arr = np.ones(20, dtype="f8")
+ out = np.empty((), dtype=arr.dtype.newbyteorder())
+ expected = np.add.reduce(arr)
+ np.add.reduce(arr, out=out)
+ assert_array_equal(expected, out)
+ # Check reduceat:
+ out = np.empty(2, dtype=arr.dtype.newbyteorder())
+ expected = np.add.reduceat(arr, [0, 1])
+ np.add.reduceat(arr, [0, 1], out=out)
+ assert_array_equal(expected, out)
+ # And accumulate:
+ out = np.empty(arr.shape, dtype=arr.dtype.newbyteorder())
+ expected = np.add.accumulate(arr)
+ np.add.accumulate(arr, out=out)
+ assert_array_equal(expected, out)
+
def test_reduce_noncontig_output(self):
# Check that reduction deals with non-contiguous output arrays
# appropriately.