path: root/numpy
author     Charles Harris <charlesr.harris@gmail.com>   2017-09-09 15:44:56 -0500
committer  GitHub <noreply@github.com>                  2017-09-09 15:44:56 -0500
commit     68a58e03a1661b8d95877771f6e7fa87ddf7c183 (patch)
tree       86687005dc5cf44a0576a7682e5babb658dcd3e4 /numpy
parent     f2173183ee34a39c1803f6237e5ab9fe909736c4 (diff)
parent     9f27418f48ada95a1cf19c98a8d1116f97f86ec5 (diff)
download   numpy-68a58e03a1661b8d95877771f6e7fa87ddf7c183.tar.gz
Merge pull request #6053 from ahaldane/multifield_structassign
MAINT: struct assignment "by field position", multi-field indices return views
Diffstat (limited to 'numpy')
-rw-r--r--  numpy/core/src/multiarray/array_assign_array.c      3
-rw-r--r--  numpy/core/src/multiarray/arraytypes.c.src         210
-rw-r--r--  numpy/core/src/multiarray/descriptor.c               2
-rw-r--r--  numpy/core/src/multiarray/descriptor.h               4
-rw-r--r--  numpy/core/src/multiarray/dtype_transfer.c          388
-rw-r--r--  numpy/core/src/multiarray/mapping.c                  72
-rw-r--r--  numpy/core/src/multiarray/multiarraymodule.c         33
-rw-r--r--  numpy/core/src/multiarray/scalartypes.c.src          42
-rw-r--r--  numpy/core/tests/test_dtype.py                       16
-rw-r--r--  numpy/core/tests/test_indexing.py                     6
-rw-r--r--  numpy/core/tests/test_multiarray.py                 127
-rw-r--r--  numpy/core/tests/test_nditer.py                     103
-rw-r--r--  numpy/core/tests/test_records.py                     17
-rw-r--r--  numpy/lib/tests/test_io.py                            4
-rw-r--r--  numpy/ma/core.py                                      5
-rw-r--r--  numpy/ma/tests/test_core.py                          14
16 files changed, 444 insertions, 602 deletions
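The headline behavior change is easiest to see from Python. The sketch below is illustrative only (field names and dtypes are made up, and the exact behavior depends on the numpy version carrying this patch): assignment between structured arrays matches fields by position rather than by name, and a multi-field index produces a view whose writes reach the parent array. The diffs that follow implement this in the C layer, starting with the overlap check in array_assign_array.c.

    import numpy as np

    dt = np.dtype([('foo', 'i8'), ('bar', 'i8')])
    a = np.ones(2, dt)

    # Fields are matched by position, so differing names no longer matter.
    src = np.array([(2, 3)], dtype=[('x', 'i8'), ('y', 'i8')])
    a[:] = src
    print(a)              # [(2, 3) (2, 3)]

    # A multi-field index now returns a view; writes propagate back.
    v = a[['bar']]
    v['bar'] = 4
    print(a['bar'])       # [4 4]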
diff --git a/numpy/core/src/multiarray/array_assign_array.c b/numpy/core/src/multiarray/array_assign_array.c
index 28cc7031a..d1bce8c3b 100644
--- a/numpy/core/src/multiarray/array_assign_array.c
+++ b/numpy/core/src/multiarray/array_assign_array.c
@@ -293,7 +293,8 @@ PyArray_AssignArray(PyArrayObject *dst, PyArrayObject *src,
if (((PyArray_NDIM(dst) == 1 && PyArray_NDIM(src) >= 1 &&
PyArray_STRIDES(dst)[0] *
PyArray_STRIDES(src)[PyArray_NDIM(src) - 1] < 0) ||
- PyArray_NDIM(dst) > 1) && arrays_overlap(src, dst)) {
+ PyArray_NDIM(dst) > 1 || PyArray_HASFIELDS(dst)) &&
+ arrays_overlap(src, dst)) {
PyArrayObject *tmp;
/*
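Adding PyArray_HASFIELDS(dst) to this check matters because by-position, field-by-field assignment can now be asked to copy a structured array onto an overlapping view of itself; in that case the copy must go through a temporary buffer. A hedged illustration (names are arbitrary):

    import numpy as np

    a = np.array([(1, 2)], dtype=[('x', 'i8'), ('y', 'i8')])
    # src and dst share the same memory, so a temporary copy is required
    a[['x', 'y']] = a[['y', 'x']]
    print(a[0].item())    # (2, 1)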
diff --git a/numpy/core/src/multiarray/arraytypes.c.src b/numpy/core/src/multiarray/arraytypes.c.src
index 921fbbe50..43dd101c5 100644
--- a/numpy/core/src/multiarray/arraytypes.c.src
+++ b/numpy/core/src/multiarray/arraytypes.c.src
@@ -774,66 +774,173 @@ VOID_getitem(void *input, void *vap)
NPY_NO_EXPORT int PyArray_CopyObject(PyArrayObject *, PyObject *);
+/* Given a structured PyArrayObject arr, index i and structured datatype descr,
+ * modify the dtype of arr to contain a single field corresponding to the ith
+ * field of descr, recompute the alignment flag, and return the offset of the
+ * field (in offset_p). This is useful in preparation for calling copyswap on
+ * individual fields of a numpy structure, in VOID_setitem. Compare to inner
+ * loops in VOID_getitem and VOID_nonzero.
+ *
+ * WARNING: Clobbers arr's dtype and alignment flag.
+ */
+NPY_NO_EXPORT int
+_setup_field(int i, PyArray_Descr *descr, PyArrayObject *arr,
+ npy_intp *offset_p)
+{
+ PyObject *key;
+ PyObject *tup;
+ PyArray_Descr *new;
+ npy_intp offset;
+
+ key = PyTuple_GET_ITEM(descr->names, i);
+ tup = PyDict_GetItem(descr->fields, key);
+ if (_unpack_field(tup, &new, &offset) < 0) {
+ return -1;
+ }
+
+ ((PyArrayObject_fields *)(arr))->descr = new;
+ if ((new->alignment > 1) && ((offset % new->alignment) != 0)) {
+ PyArray_CLEARFLAGS(arr, NPY_ARRAY_ALIGNED);
+ }
+ else {
+ PyArray_ENABLEFLAGS(arr, NPY_ARRAY_ALIGNED);
+ }
+
+ *offset_p = offset;
+ return 0;
+}
+
+/* Helper function for VOID_setitem, which uses the copyswap or casting code to
+ * copy structured datatypes between numpy arrays or scalars.
+ */
+static int
+_copy_and_return_void_setitem(PyArray_Descr *dstdescr, char *dstdata,
+ PyArray_Descr *srcdescr, char *srcdata){
+ PyArrayObject_fields dummy_struct;
+ PyArrayObject *dummy = (PyArrayObject *)&dummy_struct;
+ npy_int names_size = PyTuple_GET_SIZE(dstdescr->names);
+ npy_intp offset;
+ npy_int i;
+ int ret;
+
+ /* Fast path if dtypes are equal */
+ if (PyArray_EquivTypes(srcdescr, dstdescr)) {
+ for (i = 0; i < names_size; i++) {
+ /* neither line can ever fail, in principle */
+ if (_setup_field(i, dstdescr, dummy, &offset)) {
+ return -1;
+ }
+ PyArray_DESCR(dummy)->f->copyswap(dstdata + offset,
+ srcdata + offset, 0, dummy);
+ }
+ return 0;
+ }
+
+ /* Slow path */
+ ret = PyArray_CastRawArrays(1, srcdata, dstdata, 0, 0,
+ srcdescr, dstdescr, 0);
+ if (ret != NPY_SUCCEED) {
+ return -1;
+ }
+ return 0;
+}
+
static int
VOID_setitem(PyObject *op, void *input, void *vap)
{
char *ip = input;
PyArrayObject *ap = vap;
PyArray_Descr *descr;
+ int flags;
int itemsize=PyArray_DESCR(ap)->elsize;
int res;
descr = PyArray_DESCR(ap);
- if (descr->names && PyTuple_Check(op)) {
- PyObject *key;
- PyObject *names;
- int i, n;
- PyObject *tup;
- int savedflags;
-
- res = 0;
- /* get the names from the fields dictionary*/
- names = descr->names;
- n = PyTuple_GET_SIZE(names);
- if (PyTuple_GET_SIZE(op) != n) {
- PyErr_SetString(PyExc_ValueError,
- "size of tuple must match number of fields.");
- return -1;
- }
- savedflags = PyArray_FLAGS(ap);
- for (i = 0; i < n; i++) {
- PyArray_Descr *new;
- npy_intp offset;
- key = PyTuple_GET_ITEM(names, i);
- tup = PyDict_GetItem(descr->fields, key);
- if (_unpack_field(tup, &new, &offset) < 0) {
- ((PyArrayObject_fields *)ap)->descr = descr;
+ flags = PyArray_FLAGS(ap);
+ if (PyDataType_HASFIELDS(descr)) {
+ PyObject *errmsg;
+ npy_int i;
+ npy_intp offset;
+ int failed = 0;
+
+ /* If op is 0d-ndarray or numpy scalar, directly get dtype & data ptr */
+ if (PyArray_Check(op)) {
+ PyArrayObject *oparr = (PyArrayObject *)op;
+ if (PyArray_SIZE(oparr) != 1) {
+ PyErr_SetString(PyExc_ValueError,
+ "setting an array element with a sequence.");
return -1;
}
- /*
- * TODO: temporarily modifying the array like this
- * is bad coding style, should be changed.
- */
- ((PyArrayObject_fields *)ap)->descr = new;
- /* remember to update alignment flags */
- if ((new->alignment > 1)
- && ((((npy_intp)(ip+offset)) % new->alignment) != 0)) {
- PyArray_CLEARFLAGS(ap, NPY_ARRAY_ALIGNED);
+ return _copy_and_return_void_setitem(descr, ip,
+ PyArray_DESCR(oparr), PyArray_DATA(oparr));
+ }
+ else if (PyArray_IsScalar(op, Void)) {
+ PyArray_Descr *srcdescr = ((PyVoidScalarObject *)op)->descr;
+ char *srcdata = ((PyVoidScalarObject *)op)->obval;
+ return _copy_and_return_void_setitem(descr, ip, srcdescr, srcdata);
+ }
+ else if (PyTuple_Check(op)) {
+ /* if it's a tuple, copy field-by-field to ap, */
+ npy_intp names_size = PyTuple_GET_SIZE(descr->names);
+
+ if (names_size != PyTuple_Size(op)) {
+ errmsg = PyUString_FromFormat(
+ "could not assign tuple of length %zd to structure "
+ "with %" NPY_INTP_FMT " fields.",
+ PyTuple_Size(op), names_size);
+ PyErr_SetObject(PyExc_ValueError, errmsg);
+ Py_DECREF(errmsg);
+ return -1;
}
- else {
- PyArray_ENABLEFLAGS(ap, NPY_ARRAY_ALIGNED);
+
+ for (i = 0; i < names_size; i++) {
+ PyObject *item;
+
+ /* temporarily make ap have only this field */
+ if (_setup_field(i, descr, ap, &offset) == -1) {
+ failed = 1;
+ break;
+ }
+ item = PyTuple_GetItem(op, i);
+ if (item == NULL) {
+ failed = 1;
+ break;
+ }
+ /* use setitem to set this field */
+ if (PyArray_DESCR(ap)->f->setitem(item, ip + offset, ap) < 0) {
+ failed = 1;
+ break;
+ }
}
- res = new->f->setitem(PyTuple_GET_ITEM(op, i), ip+offset, ap);
- ((PyArrayObject_fields *)ap)->flags = savedflags;
- if (res < 0) {
- break;
+ }
+ else {
+ /* Otherwise must be non-void scalar. Try to assign to each field */
+ npy_intp names_size = PyTuple_GET_SIZE(descr->names);
+
+ for (i = 0; i < names_size; i++) {
+ /* temporarily make ap have only this field */
+ if (_setup_field(i, descr, ap, &offset) == -1) {
+ failed = 1;
+ break;
+ }
+ /* use setitem to set this field */
+ if (PyArray_DESCR(ap)->f->setitem(op, ip + offset, ap) < 0) {
+ failed = 1;
+ break;
+ }
}
}
- ((PyArrayObject_fields *)ap)->descr = descr;
- return res;
- }
- if (descr->subarray) {
+ /* reset clobbered attributes */
+ ((PyArrayObject_fields *)(ap))->descr = descr;
+ ((PyArrayObject_fields *)(ap))->flags = flags;
+
+ if (failed) {
+ return -1;
+ }
+ return 0;
+ }
+ else if (PyDataType_HASSUBARRAY(descr)) {
/* copy into an array of the same basic type */
PyArray_Dims shape = {NULL, -1};
PyArrayObject *ret;
@@ -862,19 +969,17 @@ VOID_setitem(PyObject *op, void *input, void *vap)
return res;
}
- /* Default is to use buffer interface to set item */
+ /*
+ * Fall through case - non-structured void datatype. This is a very
+ * undiscerning case: It interprets any object as a buffer
+ * and reads as many bytes as possible, padding with 0.
+ */
{
const void *buffer;
Py_ssize_t buflen;
- if (PyDataType_FLAGCHK(descr, NPY_ITEM_HASOBJECT)
- || PyDataType_FLAGCHK(descr, NPY_ITEM_IS_POINTER)) {
- PyErr_SetString(PyExc_ValueError,
- "Setting void-array with object members using buffer.");
- return -1;
- }
res = PyObject_AsReadBuffer(op, &buffer, &buflen);
if (res == -1) {
- goto fail;
+ return -1;
}
memcpy(ip, buffer, PyArray_MIN(buflen, itemsize));
if (itemsize > buflen) {
@@ -882,9 +987,6 @@ VOID_setitem(PyObject *op, void *input, void *vap)
}
}
return 0;
-
-fail:
- return -1;
}
static PyObject *
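In Python terms, the rewritten VOID_setitem accepts three kinds of right-hand sides for a structured element: a tuple (copied field by field via each field's setitem), a 0-d structured array or void scalar (copyswap fast path, or a cast when the dtypes differ), and a plain scalar (assigned to every field in turn). A rough sketch, assuming this patch's semantics:

    import numpy as np

    dt = np.dtype([('a', 'i8'), ('b', 'f8')])
    arr = np.zeros(3, dtype=dt)

    arr[0] = (1, 2.5)                           # tuple: one setitem per field
    arr[1] = np.array((3, 4.5), dtype=dt)[()]   # void scalar: copyswap/cast path
    arr[2] = 7                                  # plain scalar: every field gets 7
    print(arr)                                  # [(1, 2.5) (3, 4.5) (7, 7.)]

A tuple whose length differs from the number of fields raises ValueError, as does any array right-hand side with more than one element.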
diff --git a/numpy/core/src/multiarray/descriptor.c b/numpy/core/src/multiarray/descriptor.c
index 8e6aa6789..32b926ae8 100644
--- a/numpy/core/src/multiarray/descriptor.c
+++ b/numpy/core/src/multiarray/descriptor.c
@@ -3119,7 +3119,7 @@ static PyMethodDef arraydescr_methods[] = {
*
* Returns 1 if it has a simple layout, 0 otherwise.
*/
-static int
+NPY_NO_EXPORT int
is_dtype_struct_simple_unaligned_layout(PyArray_Descr *dtype)
{
PyObject *names, *fields, *key, *tup, *title;
diff --git a/numpy/core/src/multiarray/descriptor.h b/numpy/core/src/multiarray/descriptor.h
index ff1fc980a..f95041195 100644
--- a/numpy/core/src/multiarray/descriptor.h
+++ b/numpy/core/src/multiarray/descriptor.h
@@ -10,6 +10,10 @@ array_set_typeDict(PyObject *NPY_UNUSED(ignored), PyObject *args);
NPY_NO_EXPORT PyArray_Descr *
_arraydescr_fromobj(PyObject *obj);
+
+NPY_NO_EXPORT int
+is_dtype_struct_simple_unaligned_layout(PyArray_Descr *dtype);
+
/*
* Creates a string repr of the dtype, excluding the 'dtype()' part
* surrounding the object. This object may be a string, a list, or
diff --git a/numpy/core/src/multiarray/dtype_transfer.c b/numpy/core/src/multiarray/dtype_transfer.c
index dfe95d65c..9c27255aa 100644
--- a/numpy/core/src/multiarray/dtype_transfer.c
+++ b/numpy/core/src/multiarray/dtype_transfer.c
@@ -25,6 +25,7 @@
#include "ctors.h"
#include "_datetime.h"
#include "datetime_strings.h"
+#include "descriptor.h"
#include "shape.h"
#include "lowlevel_strided_loops.h"
@@ -2521,7 +2522,7 @@ _strided_to_strided_field_transfer(char *dst, npy_intp dst_stride,
/*
* Handles fields transfer. To call this, at least one of the dtypes
- * must have fields
+ * must have fields. Does not take care of object<->structure conversion
*/
static int
get_fields_transfer_function(int aligned,
@@ -2532,22 +2533,26 @@ get_fields_transfer_function(int aligned,
NpyAuxData **out_transferdata,
int *out_needs_api)
{
- PyObject *names, *key, *tup, *title;
+ PyObject *key, *tup, *title;
PyArray_Descr *src_fld_dtype, *dst_fld_dtype;
- npy_int i, names_size, field_count, structsize;
+ npy_int i, field_count, structsize;
int src_offset, dst_offset;
_field_transfer_data *data;
_single_field_transfer *fields;
+ int failed = 0;
+
+ /*
+ * There are three cases to take care of: 1. src is non-structured,
+ * 2. dst is non-structured, or 3. both are structured.
+ */
- /* Copy the src value to all the fields of dst */
+ /* 1. src is non-structured. Copy the src value to all the fields of dst */
if (!PyDataType_HASFIELDS(src_dtype)) {
- names = dst_dtype->names;
- names_size = PyTuple_GET_SIZE(dst_dtype->names);
+ field_count = PyTuple_GET_SIZE(dst_dtype->names);
- field_count = names_size;
+ /* Allocate the field-data structure and populate it */
structsize = sizeof(_field_transfer_data) +
(field_count + 1) * sizeof(_single_field_transfer);
- /* Allocate the data and populate it */
data = (_field_transfer_data *)PyArray_malloc(structsize);
if (data == NULL) {
PyErr_NoMemory();
@@ -2557,8 +2562,8 @@ get_fields_transfer_function(int aligned,
data->base.clone = &_field_transfer_data_clone;
fields = &data->fields;
- for (i = 0; i < names_size; ++i) {
- key = PyTuple_GET_ITEM(names, i);
+ for (i = 0; i < field_count; ++i) {
+ key = PyTuple_GET_ITEM(dst_dtype->names, i);
tup = PyDict_GetItem(dst_dtype->fields, key);
if (!PyArg_ParseTuple(tup, "Oi|O", &dst_fld_dtype,
&dst_offset, &title)) {
@@ -2584,7 +2589,7 @@ get_fields_transfer_function(int aligned,
}
/*
- * If the references should be removed from src, add
+ * If references should be decrefd in src, add
* another transfer function to do that.
*/
if (move_references && PyDataType_REFCHK(src_dtype)) {
@@ -2612,24 +2617,19 @@ get_fields_transfer_function(int aligned,
return NPY_SUCCEED;
}
- /* Copy the value of the first field to dst */
- else if (!PyDataType_HASFIELDS(dst_dtype)) {
- names = src_dtype->names;
- names_size = PyTuple_GET_SIZE(src_dtype->names);
- /*
- * If DECREF is needed on source fields, may need
- * to process all the fields
- */
- if (move_references && PyDataType_REFCHK(src_dtype)) {
- field_count = names_size + 1;
- }
- else {
- field_count = 1;
+ /* 2. dst is non-structured. Allow transfer from single-field src to dst */
+ if (!PyDataType_HASFIELDS(dst_dtype)) {
+ if (PyTuple_GET_SIZE(src_dtype->names) != 1) {
+ PyErr_SetString(PyExc_ValueError,
+ "Can't cast from structure to non-structure, except if the "
+ "structure only has a single field.");
+ return NPY_FAIL;
}
+
+ /* Allocate the field-data structure and populate it */
structsize = sizeof(_field_transfer_data) +
- field_count * sizeof(_single_field_transfer);
- /* Allocate the data and populate it */
+ 1 * sizeof(_single_field_transfer);
data = (_field_transfer_data *)PyArray_malloc(structsize);
if (data == NULL) {
PyErr_NoMemory();
@@ -2639,286 +2639,102 @@ get_fields_transfer_function(int aligned,
data->base.clone = &_field_transfer_data_clone;
fields = &data->fields;
- key = PyTuple_GET_ITEM(names, 0);
+ key = PyTuple_GET_ITEM(src_dtype->names, 0);
tup = PyDict_GetItem(src_dtype->fields, key);
- if (!PyArg_ParseTuple(tup, "Oi|O", &src_fld_dtype,
- &src_offset, &title)) {
- PyArray_free(data);
+ if (!PyArg_ParseTuple(tup, "Oi|O",
+ &src_fld_dtype, &src_offset, &title)) {
return NPY_FAIL;
}
- field_count = 0;
- /*
- * Special case bool type, the existence of fields implies True
- *
- * TODO: Perhaps a better behavior would be to combine all the
- * input fields with an OR? The same would apply to subarrays.
- */
- if (dst_dtype->type_num == NPY_BOOL) {
- if (get_bool_setdstone_transfer_function(dst_stride,
- &fields[field_count].stransfer,
- &fields[field_count].data,
- out_needs_api) != NPY_SUCCEED) {
- PyArray_free(data);
- return NPY_FAIL;
- }
- fields[field_count].src_offset = 0;
- fields[field_count].dst_offset = 0;
- fields[field_count].src_itemsize = 0;
- field_count++;
-
- /* If the src field has references, may need to clear them */
- if (move_references && PyDataType_REFCHK(src_fld_dtype)) {
- if (get_decsrcref_transfer_function(0,
- src_stride,
- src_fld_dtype,
- &fields[field_count].stransfer,
- &fields[field_count].data,
- out_needs_api) != NPY_SUCCEED) {
- NPY_AUXDATA_FREE(fields[0].data);
- PyArray_free(data);
- return NPY_FAIL;
- }
- fields[field_count].src_offset = src_offset;
- fields[field_count].dst_offset = 0;
- fields[field_count].src_itemsize = src_fld_dtype->elsize;
- field_count++;
- }
- }
- /* Transfer the first field to the output */
- else {
- if (PyArray_GetDTypeTransferFunction(0,
- src_stride, dst_stride,
- src_fld_dtype, dst_dtype,
- move_references,
- &fields[field_count].stransfer,
- &fields[field_count].data,
- out_needs_api) != NPY_SUCCEED) {
- PyArray_free(data);
- return NPY_FAIL;
- }
- fields[field_count].src_offset = src_offset;
- fields[field_count].dst_offset = 0;
- fields[field_count].src_itemsize = src_fld_dtype->elsize;
- field_count++;
- }
- /*
- * If the references should be removed from src, add
- * more transfer functions to decrement the references
- * for all the other fields.
- */
- if (move_references && PyDataType_REFCHK(src_dtype)) {
- for (i = 1; i < names_size; ++i) {
- key = PyTuple_GET_ITEM(names, i);
- tup = PyDict_GetItem(src_dtype->fields, key);
- if (!PyArg_ParseTuple(tup, "Oi|O", &src_fld_dtype,
- &src_offset, &title)) {
- return NPY_FAIL;
- }
- if (PyDataType_REFCHK(src_fld_dtype)) {
- if (get_decsrcref_transfer_function(0,
- src_stride,
- src_fld_dtype,
- &fields[field_count].stransfer,
- &fields[field_count].data,
- out_needs_api) != NPY_SUCCEED) {
- for (i = field_count-1; i >= 0; --i) {
- NPY_AUXDATA_FREE(fields[i].data);
- }
- PyArray_free(data);
- return NPY_FAIL;
- }
- fields[field_count].src_offset = src_offset;
- fields[field_count].dst_offset = 0;
- fields[field_count].src_itemsize = src_fld_dtype->elsize;
- field_count++;
- }
- }
+ if (PyArray_GetDTypeTransferFunction(0,
+ src_stride, dst_stride,
+ src_fld_dtype, dst_dtype,
+ move_references,
+ &fields[0].stransfer,
+ &fields[0].data,
+ out_needs_api) != NPY_SUCCEED) {
+ PyArray_free(data);
+ return NPY_FAIL;
}
+ fields[0].src_offset = src_offset;
+ fields[0].dst_offset = 0;
+ fields[0].src_itemsize = src_fld_dtype->elsize;
- data->field_count = field_count;
+ data->field_count = 1;
*out_stransfer = &_strided_to_strided_field_transfer;
*out_transferdata = (NpyAuxData *)data;
return NPY_SUCCEED;
}
- /* Match up the fields to copy */
- else {
- /* Keeps track of the names we already used */
- PyObject *used_names_dict = NULL;
- int cmpval;
-
- const char *msg =
- "Assignment between structured arrays with different field names "
- "will change in numpy 1.14.\n\n"
- "Previously fields in the dst would be set to the value of the "
- "identically-named field in the src. In numpy 1.14 fields will "
- "instead be assigned 'by position': The Nth field of the dst "
- "will be set to the Nth field of the src array.\n\n"
- "See the release notes for details";
- /*
- * 2016-09-19, 1.12
- * Warn if the field names of the dst and src are not
- * identical, since then behavior will change in 1.13.
- */
- cmpval = PyObject_RichCompareBool(src_dtype->names,
- dst_dtype->names, Py_EQ);
- if (PyErr_Occurred()) {
- return NPY_FAIL;
- }
- if (cmpval != 1) {
- if (DEPRECATE_FUTUREWARNING(msg) < 0) {
- return NPY_FAIL;
- }
- }
- names = dst_dtype->names;
- names_size = PyTuple_GET_SIZE(dst_dtype->names);
+ /* 3. Otherwise both src and dst are structured arrays */
+ field_count = PyTuple_GET_SIZE(dst_dtype->names);
- /*
- * If DECREF is needed on source fields, will need
- * to also go through its fields.
- */
- if (move_references && PyDataType_REFCHK(src_dtype)) {
- field_count = names_size + PyTuple_GET_SIZE(src_dtype->names);
- used_names_dict = PyDict_New();
- if (used_names_dict == NULL) {
- return NPY_FAIL;
- }
- }
- else {
- field_count = names_size;
- }
- structsize = sizeof(_field_transfer_data) +
- field_count * sizeof(_single_field_transfer);
- /* Allocate the data and populate it */
- data = (_field_transfer_data *)PyArray_malloc(structsize);
- if (data == NULL) {
- PyErr_NoMemory();
- Py_XDECREF(used_names_dict);
- return NPY_FAIL;
- }
- data->base.free = &_field_transfer_data_free;
- data->base.clone = &_field_transfer_data_clone;
- fields = &data->fields;
+ /* Match up the fields to copy (field-by-field transfer) */
+ if (PyTuple_GET_SIZE(src_dtype->names) != field_count) {
+ PyErr_SetString(PyExc_ValueError, "structures must have the same size");
+ return NPY_FAIL;
+ }
- for (i = 0; i < names_size; ++i) {
- key = PyTuple_GET_ITEM(names, i);
- tup = PyDict_GetItem(dst_dtype->fields, key);
- if (!PyArg_ParseTuple(tup, "Oi|O", &dst_fld_dtype,
- &dst_offset, &title)) {
- for (i = i-1; i >= 0; --i) {
- NPY_AUXDATA_FREE(fields[i].data);
- }
- PyArray_free(data);
- Py_XDECREF(used_names_dict);
- return NPY_FAIL;
- }
- tup = PyDict_GetItem(src_dtype->fields, key);
- if (tup != NULL) {
- if (!PyArg_ParseTuple(tup, "Oi|O", &src_fld_dtype,
- &src_offset, &title)) {
- for (i = i-1; i >= 0; --i) {
- NPY_AUXDATA_FREE(fields[i].data);
- }
- PyArray_free(data);
- Py_XDECREF(used_names_dict);
- return NPY_FAIL;
- }
- if (PyArray_GetDTypeTransferFunction(0,
- src_stride, dst_stride,
- src_fld_dtype, dst_fld_dtype,
- move_references,
- &fields[i].stransfer,
- &fields[i].data,
- out_needs_api) != NPY_SUCCEED) {
- for (i = i-1; i >= 0; --i) {
- NPY_AUXDATA_FREE(fields[i].data);
- }
- PyArray_free(data);
- Py_XDECREF(used_names_dict);
- return NPY_FAIL;
- }
- fields[i].src_offset = src_offset;
- fields[i].dst_offset = dst_offset;
- fields[i].src_itemsize = src_fld_dtype->elsize;
+ /* Allocate the field-data structure and populate it */
+ structsize = sizeof(_field_transfer_data) +
+ field_count * sizeof(_single_field_transfer);
+ data = (_field_transfer_data *)PyArray_malloc(structsize);
+ if (data == NULL) {
+ PyErr_NoMemory();
+ return NPY_FAIL;
+ }
+ data->base.free = &_field_transfer_data_free;
+ data->base.clone = &_field_transfer_data_clone;
+ fields = &data->fields;
- if (used_names_dict != NULL) {
- PyDict_SetItem(used_names_dict, key, Py_True);
- }
- }
- else {
- if (get_setdstzero_transfer_function(0,
- dst_stride,
- dst_fld_dtype,
- &fields[i].stransfer,
- &fields[i].data,
- out_needs_api) != NPY_SUCCEED) {
- for (i = i-1; i >= 0; --i) {
- NPY_AUXDATA_FREE(fields[i].data);
- }
- PyArray_free(data);
- Py_XDECREF(used_names_dict);
- return NPY_FAIL;
- }
- fields[i].src_offset = 0;
- fields[i].dst_offset = dst_offset;
- fields[i].src_itemsize = 0;
- }
+ /* set up the transfer function for each field */
+ for (i = 0; i < field_count; ++i) {
+ key = PyTuple_GET_ITEM(dst_dtype->names, i);
+ tup = PyDict_GetItem(dst_dtype->fields, key);
+ if (!PyArg_ParseTuple(tup, "Oi|O", &dst_fld_dtype,
+ &dst_offset, &title)) {
+ failed = 1;
+ break;
+ }
+ key = PyTuple_GET_ITEM(src_dtype->names, i);
+ tup = PyDict_GetItem(src_dtype->fields, key);
+ if (!PyArg_ParseTuple(tup, "Oi|O", &src_fld_dtype,
+ &src_offset, &title)) {
+ failed = 1;
+ break;
}
- if (move_references && PyDataType_REFCHK(src_dtype)) {
- /* Use field_count to track additional functions added */
- field_count = names_size;
-
- names = src_dtype->names;
- names_size = PyTuple_GET_SIZE(src_dtype->names);
- for (i = 0; i < names_size; ++i) {
- key = PyTuple_GET_ITEM(names, i);
- if (PyDict_GetItem(used_names_dict, key) == NULL) {
- tup = PyDict_GetItem(src_dtype->fields, key);
- if (!PyArg_ParseTuple(tup, "Oi|O", &src_fld_dtype,
- &src_offset, &title)) {
- for (i = field_count-1; i >= 0; --i) {
- NPY_AUXDATA_FREE(fields[i].data);
- }
- PyArray_free(data);
- Py_XDECREF(used_names_dict);
- return NPY_FAIL;
- }
- if (PyDataType_REFCHK(src_fld_dtype)) {
- if (get_decsrcref_transfer_function(0,
- src_stride,
- src_fld_dtype,
- &fields[field_count].stransfer,
- &fields[field_count].data,
- out_needs_api) != NPY_SUCCEED) {
- for (i = field_count-1; i >= 0; --i) {
- NPY_AUXDATA_FREE(fields[i].data);
- }
- PyArray_free(data);
- return NPY_FAIL;
- }
- fields[field_count].src_offset = src_offset;
- fields[field_count].dst_offset = 0;
- fields[field_count].src_itemsize =
- src_fld_dtype->elsize;
- field_count++;
- }
- }
- }
+ if (PyArray_GetDTypeTransferFunction(0,
+ src_stride, dst_stride,
+ src_fld_dtype, dst_fld_dtype,
+ move_references,
+ &fields[i].stransfer,
+ &fields[i].data,
+ out_needs_api) != NPY_SUCCEED) {
+ failed = 1;
+ break;
}
+ fields[i].src_offset = src_offset;
+ fields[i].dst_offset = dst_offset;
+ fields[i].src_itemsize = src_fld_dtype->elsize;
+ }
- Py_XDECREF(used_names_dict);
+ if (failed) {
+ for (i = i-1; i >= 0; --i) {
+ NPY_AUXDATA_FREE(fields[i].data);
+ }
+ PyArray_free(data);
+ return NPY_FAIL;
+ }
- data->field_count = field_count;
+ data->field_count = field_count;
- *out_stransfer = &_strided_to_strided_field_transfer;
- *out_transferdata = (NpyAuxData *)data;
+ *out_stransfer = &_strided_to_strided_field_transfer;
+ *out_transferdata = (NpyAuxData *)data;
- return NPY_SUCCEED;
- }
+ return NPY_SUCCEED;
}
static int
@@ -3649,8 +3465,10 @@ PyArray_GetDTypeTransferFunction(int aligned,
* If there are no references and the data types are equivalent,
* return a simple copy
*/
- if (!PyDataType_REFCHK(src_dtype) && !PyDataType_REFCHK(dst_dtype) &&
- PyArray_EquivTypes(src_dtype, dst_dtype)) {
+ if (PyArray_EquivTypes(src_dtype, dst_dtype) &&
+ !PyDataType_REFCHK(src_dtype) && !PyDataType_REFCHK(dst_dtype) &&
+ ( !PyDataType_HASFIELDS(dst_dtype) ||
+ is_dtype_struct_simple_unaligned_layout(dst_dtype)) ) {
/*
* We can't pass through the aligned flag because it's not
* appropriate. Consider a size-8 string, it will say it's
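The net effect of the rewritten get_fields_transfer_function is that structured casts are strictly positional: a non-structured source is broadcast to every destination field, a structured source can only cast to a non-structured destination if it has exactly one field, and structured-to-structured casts require the same number of fields. An illustrative sketch (behavior as targeted by this PR; exact exception types and messages may differ):

    import numpy as np

    a = np.array([(1, 2.0)], dtype=[('a', 'i4'), ('b', 'f8')])

    # same field count: copied field by field, by position (names irrelevant)
    print(a.astype([('x', 'f8'), ('y', 'i8')]))      # [(1., 2)]

    # different field counts are expected to be rejected
    try:
        a.astype([('x', 'f8')])
    except (ValueError, TypeError) as e:
        print('rejected:', e)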
diff --git a/numpy/core/src/multiarray/mapping.c b/numpy/core/src/multiarray/mapping.c
index 4833b5069..1a92365c8 100644
--- a/numpy/core/src/multiarray/mapping.c
+++ b/numpy/core/src/multiarray/mapping.c
@@ -1446,10 +1446,6 @@ _get_field_view(PyArrayObject *arr, PyObject *ind, PyArrayObject **view)
PyObject *fields, *names;
PyArray_Descr *view_dtype;
- /* variables needed to make a copy, to remove in the future */
- static PyObject *copyfunc = NULL;
- PyObject *viewcopy;
-
seqlen = PySequence_Size(ind);
/* quit if have a 0-d array (seqlen==-1) or a 0-len array */
@@ -1502,6 +1498,35 @@ _get_field_view(PyArrayObject *arr, PyObject *ind, PyArrayObject **view)
Py_DECREF(names);
return 0;
}
+ // disallow use of titles as index
+ if (PyTuple_Size(tup) == 3) {
+ PyObject *title = PyTuple_GET_ITEM(tup, 2);
+ int titlecmp = PyObject_RichCompareBool(title, name, Py_EQ);
+ if (titlecmp == 1) {
+ // if title == name, we were given a title, not a field name
+ PyErr_SetString(PyExc_KeyError,
+ "cannot use field titles in multi-field index");
+ }
+ if (titlecmp != 0 || PyDict_SetItem(fields, title, tup) < 0) {
+ Py_DECREF(title);
+ Py_DECREF(name);
+ Py_DECREF(fields);
+ Py_DECREF(names);
+ return 0;
+ }
+ Py_DECREF(title);
+ }
+ // disallow duplicate field indices
+ if (PyDict_Contains(fields, name)) {
+ PyObject *errmsg = PyUString_FromString(
+ "duplicate field of name ");
+ PyUString_ConcatAndDel(&errmsg, name);
+ PyErr_SetObject(PyExc_KeyError, errmsg);
+ Py_DECREF(errmsg);
+ Py_DECREF(fields);
+ Py_DECREF(names);
+ return 0;
+ }
if (PyDict_SetItem(fields, name, tup) < 0) {
Py_DECREF(name);
Py_DECREF(fields);
@@ -1545,29 +1570,6 @@ _get_field_view(PyArrayObject *arr, PyObject *ind, PyArrayObject **view)
return 0;
}
- /*
- * Return copy for now (future plan to return the view above). All the
- * following code in this block can then be replaced by "return 0;"
- */
- npy_cache_import("numpy.core._internal", "_copy_fields", &copyfunc);
- if (copyfunc == NULL) {
- Py_DECREF(*view);
- *view = NULL;
- return 0;
- }
-
- PyArray_CLEARFLAGS(*view, NPY_ARRAY_WARN_ON_WRITE);
- viewcopy = PyObject_CallFunction(copyfunc, "O", *view);
- if (viewcopy == NULL) {
- Py_DECREF(*view);
- *view = NULL;
- return 0;
- }
- Py_DECREF(*view);
- *view = (PyArrayObject*)viewcopy;
-
- /* warn when writing to the copy */
- PyArray_ENABLEFLAGS(*view, NPY_ARRAY_WARN_ON_WRITE);
return 0;
}
return -1;
@@ -1601,11 +1603,6 @@ array_subscript(PyArrayObject *self, PyObject *op)
if (view == NULL) {
return NULL;
}
-
- /* warn if writing to a copy. copies will have no base */
- if (PyArray_BASE(view) == NULL) {
- PyArray_ENABLEFLAGS(view, NPY_ARRAY_WARN_ON_WRITE);
- }
return (PyObject*)view;
}
}
@@ -1892,17 +1889,6 @@ array_assign_subscript(PyArrayObject *self, PyObject *ind, PyObject *op)
PyArrayObject *view;
int ret = _get_field_view(self, ind, &view);
if (ret == 0){
-
-#if defined(NPY_PY3K)
- if (!PyUnicode_Check(ind)) {
-#else
- if (!PyString_Check(ind) && !PyUnicode_Check(ind)) {
-#endif
- PyErr_SetString(PyExc_ValueError,
- "multi-field assignment is not supported");
- return -1;
- }
-
if (view == NULL) {
return -1;
}
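With the copy-and-warn fallback removed, _get_field_view hands back a genuine view for multi-field indices, and the new checks reject indices that use a field title or repeat a field name. A hedged sketch of the resulting Python-level behavior:

    import numpy as np

    a = np.zeros(3, dtype=[('x', 'i8'), ('y', 'f8'), ('z', 'i4')])

    v = a[['x', 'z']]        # a view sharing memory with `a`
    v['x'] = 1
    print(a['x'])            # [1 1 1] -- writes go through to the parent

    try:
        a[['x', 'x']]        # duplicate field names are rejected
    except KeyError as e:
        print('rejected:', e)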
diff --git a/numpy/core/src/multiarray/multiarraymodule.c b/numpy/core/src/multiarray/multiarraymodule.c
index 72515a3aa..3bf42d72c 100644
--- a/numpy/core/src/multiarray/multiarraymodule.c
+++ b/numpy/core/src/multiarray/multiarraymodule.c
@@ -1418,29 +1418,34 @@ array_putmask(PyObject *NPY_UNUSED(module), PyObject *args, PyObject *kwds)
/*
* Compare the field dictionaries for two types.
*
- * Return 1 if the contents are the same, 0 if not.
+ * Return 1 if the field types and field names of the two descrs are equal and
+ * in the same order, 0 if not.
*/
static int
-_equivalent_fields(PyObject *field1, PyObject *field2) {
+_equivalent_fields(PyArray_Descr *type1, PyArray_Descr *type2) {
- int same, val;
+ int val;
- if (field1 == field2) {
+ if (type1->fields == type2->fields && type1->names == type2->names) {
return 1;
}
- if (field1 == NULL || field2 == NULL) {
+ if (type1->fields == NULL || type2->fields == NULL) {
return 0;
}
- val = PyObject_RichCompareBool(field1, field2, Py_EQ);
+ val = PyObject_RichCompareBool(type1->fields, type2->fields, Py_EQ);
if (val != 1 || PyErr_Occurred()) {
- same = 0;
+ PyErr_Clear();
+ return 0;
}
- else {
- same = 1;
+
+ val = PyObject_RichCompareBool(type1->names, type2->names, Py_EQ);
+ if (val != 1 || PyErr_Occurred()) {
+ PyErr_Clear();
+ return 0;
}
- PyErr_Clear();
- return same;
+
+ return 1;
}
/*
@@ -1499,10 +1504,8 @@ PyArray_EquivTypes(PyArray_Descr *type1, PyArray_Descr *type2)
return ((type_num1 == type_num2)
&& _equivalent_subarrays(type1->subarray, type2->subarray));
}
- if (type_num1 == NPY_VOID
- || type_num2 == NPY_VOID) {
- return ((type_num1 == type_num2)
- && _equivalent_fields(type1->fields, type2->fields));
+ if (type_num1 == NPY_VOID || type_num2 == NPY_VOID) {
+ return ((type_num1 == type_num2) && _equivalent_fields(type1, type2));
}
if (type_num1 == NPY_DATETIME
|| type_num1 == NPY_TIMEDELTA
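Because _equivalent_fields now also compares the names tuples, two structured dtypes with the same layout but differently ordered (or differently named) fields no longer count as equivalent. For example, mirroring the new test_field_order_equality test:

    import numpy as np

    x = np.dtype({'names': ['A', 'B'], 'formats': ['i4', 'f4'], 'offsets': [0, 4]})
    y = np.dtype({'names': ['B', 'A'], 'formats': ['f4', 'i4'], 'offsets': [4, 0]})
    print(x == y)    # False: same layout, but field names/order differ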
diff --git a/numpy/core/src/multiarray/scalartypes.c.src b/numpy/core/src/multiarray/scalartypes.c.src
index 3b2aa8a43..7a6ed6a86 100644
--- a/numpy/core/src/multiarray/scalartypes.c.src
+++ b/numpy/core/src/multiarray/scalartypes.c.src
@@ -2179,35 +2179,31 @@ static PyObject *
voidtype_subscript(PyVoidScalarObject *self, PyObject *ind)
{
npy_intp n;
- PyObject *ret, *args;
+ PyObject *ret, *res;
- if (!(PyDataType_HASFIELDS(self->descr))) {
- PyErr_SetString(PyExc_IndexError,
- "can't index void scalar without fields");
- return NULL;
+ /* structured voids will accept an integer index */
+ if (PyDataType_HASFIELDS(self->descr)) {
+ n = PyArray_PyIntAsIntp(ind);
+ if (!error_converting(n)) {
+ return voidtype_item(self, (Py_ssize_t)n);
+ }
+ PyErr_Clear();
}
-#if defined(NPY_PY3K)
- if (PyUString_Check(ind)) {
-#else
- if (PyBytes_Check(ind) || PyUnicode_Check(ind)) {
-#endif
- args = Py_BuildValue("(O)", ind);
- ret = gentype_generic_method((PyObject *)self, args, NULL, "__getitem__");
- Py_DECREF(args);
- return ret;
- }
+ res = PyArray_FromScalar((PyObject*)self, NULL);
- /* try to convert it to a number */
- n = PyArray_PyIntAsIntp(ind);
- if (error_converting(n)) {
- goto fail;
+ /* ellipsis should return 0d array */
+ if(ind == Py_Ellipsis){
+ return res;
}
- return voidtype_item(self, (Py_ssize_t)n);
-fail:
- PyErr_SetString(PyExc_IndexError, "invalid index");
- return NULL;
+ /*
+ * other cases (field names, empty tuple) will return either
+ * scalar or non-0d array. Compute this using ndarray subscript.
+ */
+ ret = array_subscript((PyArrayObject *)res, ind);
+ Py_DECREF(res);
+ return PyArray_Return((PyArrayObject*)ret);
}
static int
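The rewritten voidtype_subscript routes everything except plain integers and Ellipsis through the regular ndarray subscript code, so a structured void scalar supports integer, field-name, empty-tuple, and Ellipsis indexing. A sketch of the expected behavior (the dtype is arbitrary):

    import numpy as np

    s = np.array((1, 2.0), dtype=[('a', 'i4'), ('b', 'f8')])[()]   # void scalar

    print(s[0])            # 1   (integer index -> field by position)
    print(s['b'])          # 2.0 (field name)
    print(s[()])           # the scalar itself
    print(type(s[...]))    # <class 'numpy.ndarray'> -- a 0-d array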
diff --git a/numpy/core/tests/test_dtype.py b/numpy/core/tests/test_dtype.py
index 6f6654d42..9cefb2ad1 100644
--- a/numpy/core/tests/test_dtype.py
+++ b/numpy/core/tests/test_dtype.py
@@ -104,6 +104,15 @@ class TestBuiltin(object):
'formats':['i1', 'f4'],
'offsets':[0, 2]}, align=True)
+ def test_field_order_equality(self):
+ x = np.dtype({'names': ['A', 'B'],
+ 'formats': ['i4', 'f4'],
+ 'offsets': [0, 4]})
+ y = np.dtype({'names': ['B', 'A'],
+ 'formats': ['f4', 'i4'],
+ 'offsets': [4, 0]})
+ assert_equal(x == y, False)
+
class TestRecord(object):
def test_equivalent_record(self):
"""Test whether equivalent record dtypes hash the same."""
@@ -211,11 +220,12 @@ class TestRecord(object):
dt = np.dtype({'names':['f0', 'f1', 'f2'], 'formats':['<u4', '<u2', '<u2'],
'offsets':[4, 0, 2]}, align=True)
assert_equal(dt.itemsize, 8)
+ # field name should not matter: assignment is by position
dt2 = np.dtype({'names':['f2', 'f0', 'f1'],
- 'formats':['<u2', '<u4', '<u2'],
- 'offsets':[2, 4, 0]}, align=True)
+ 'formats':['<u4', '<u2', '<u2'],
+ 'offsets':[4, 0, 2]}, align=True)
vals = [(0, 1, 2), (3, -1, 4)]
- vals2 = [(2, 0, 1), (4, 3, -1)]
+ vals2 = [(0, 1, 2), (3, -1, 4)]
a = np.array(vals, dt)
b = np.array(vals2, dt2)
assert_equal(a.astype(dt2), b)
diff --git a/numpy/core/tests/test_indexing.py b/numpy/core/tests/test_indexing.py
index 43965d994..4c3bac529 100644
--- a/numpy/core/tests/test_indexing.py
+++ b/numpy/core/tests/test_indexing.py
@@ -106,6 +106,12 @@ class TestIndexing(object):
a = np.array(0)
assert_(isinstance(a[()], np.int_))
+ def test_void_scalar_empty_tuple(self):
+ s = np.zeros((), dtype='V4')
+ assert_equal(s[()].dtype, s.dtype)
+ assert_equal(s[()], s)
+ assert_equal(type(s[...]), np.ndarray)
+
def test_same_kind_index_casting(self):
# Indexes should be cast with same-kind and not safe, even if that
# is somewhat unsafe. So test various different code paths.
diff --git a/numpy/core/tests/test_multiarray.py b/numpy/core/tests/test_multiarray.py
index 0ec05eed5..e19087540 100644
--- a/numpy/core/tests/test_multiarray.py
+++ b/numpy/core/tests/test_multiarray.py
@@ -952,16 +952,13 @@ class TestStructured(object):
# Check that equality comparison works on structured arrays if
# they are 'equiv'-castable
a = np.array([(5, 42), (10, 1)], dtype=[('a', '>i4'), ('b', '<f8')])
- b = np.array([(42, 5), (1, 10)], dtype=[('b', '>f8'), ('a', '<i4')])
+ b = np.array([(5, 42), (10, 1)], dtype=[('a', '<i4'), ('b', '>f8')])
assert_(np.can_cast(a.dtype, b.dtype, casting='equiv'))
assert_equal(a == b, [True, True])
- # Check that 'equiv' casting can reorder fields and change byte
- # order
- # New in 1.12: This behavior changes in 1.13, test for dep warning
+ # Check that 'equiv' casting can change byte order
assert_(np.can_cast(a.dtype, b.dtype, casting='equiv'))
- with assert_warns(FutureWarning):
- c = a.astype(b.dtype, casting='equiv')
+ c = a.astype(b.dtype, casting='equiv')
assert_equal(a == c, [True, True])
# Check that 'safe' casting can change byte order and up-cast
@@ -1096,6 +1093,54 @@ class TestStructured(object):
b = a[0]
assert_(b.base is a)
+ def test_assignment(self):
+ def testassign(arr, v):
+ c = arr.copy()
+ c[0] = v # assign using setitem
+ c[1:] = v # assign using "dtype_transfer" code paths
+ return c
+
+ dt = np.dtype([('foo', 'i8'), ('bar', 'i8')])
+ arr = np.ones(2, dt)
+ v1 = np.array([(2,3)], dtype=[('foo', 'i8'), ('bar', 'i8')])
+ v2 = np.array([(2,3)], dtype=[('bar', 'i8'), ('foo', 'i8')])
+ v3 = np.array([(2,3)], dtype=[('bar', 'i8'), ('baz', 'i8')])
+ v4 = np.array([(2,)], dtype=[('bar', 'i8')])
+ v5 = np.array([(2,3)], dtype=[('foo', 'f8'), ('bar', 'f8')])
+ w = arr.view({'names': ['bar'], 'formats': ['i8'], 'offsets': [8]})
+
+ ans = np.array([(2,3),(2,3)], dtype=dt)
+ assert_equal(testassign(arr, v1), ans)
+ assert_equal(testassign(arr, v2), ans)
+ assert_equal(testassign(arr, v3), ans)
+ assert_raises(ValueError, lambda: testassign(arr, v4))
+ assert_equal(testassign(arr, v5), ans)
+ w[:] = 4
+ assert_equal(arr, np.array([(1,4),(1,4)], dtype=dt))
+
+ # test field-reordering, assignment by position, and self-assignment
+ a = np.array([(1,2,3)],
+ dtype=[('foo', 'i8'), ('bar', 'i8'), ('baz', 'f4')])
+ a[['foo', 'bar']] = a[['bar', 'foo']]
+ assert_equal(a[0].item(), (2,1,3))
+
+ # test that this works even for 'simple_unaligned' structs
+ # (ie, that PyArray_EquivTypes cares about field order too)
+ a = np.array([(1,2)], dtype=[('a', 'i4'), ('b', 'i4')])
+ a[['a', 'b']] = a[['b', 'a']]
+ assert_equal(a[0].item(), (2,1))
+
+ def test_structuredscalar_indexing(self):
+ # test gh-7262
+ x = np.empty(shape=1, dtype="(2)3S,(2)3U")
+ assert_equal(x[["f0","f1"]][0], x[0][["f0","f1"]])
+ assert_equal(x[0], x[0][()])
+
+ def test_multiindex_titles(self):
+ a = np.zeros(4, dtype=[(('a', 'b'), 'i'), ('c', 'i'), ('d', 'i')])
+ assert_raises(KeyError, lambda : a[['a','c']])
+ assert_raises(KeyError, lambda : a[['b','b']])
+ a[['b','c']] # no exception
class TestBool(object):
def test_test_interning(self):
@@ -4498,23 +4543,11 @@ class TestRecord(object):
# multiple subfields
fn2 = func('f2')
b[fn2] = 3
- with suppress_warnings() as sup:
- sup.filter(FutureWarning,
- "Assignment between structured arrays.*")
- sup.filter(FutureWarning,
- "Numpy has detected that you .*")
-
- assert_equal(b[['f1', 'f2']][0].tolist(), (2, 3))
- assert_equal(b[['f2', 'f1']][0].tolist(), (3, 2))
- assert_equal(b[['f1', 'f3']][0].tolist(), (2, (1,)))
- # view of subfield view/copy
- assert_equal(b[['f1', 'f2']][0].view(('i4', 2)).tolist(),
- (2, 3))
- assert_equal(b[['f2', 'f1']][0].view(('i4', 2)).tolist(),
- (3, 2))
- view_dtype = [('f1', 'i4'), ('f3', [('', 'i4')])]
- assert_equal(b[['f1', 'f3']][0].view(view_dtype).tolist(),
- (2, (1,)))
+
+ assert_equal(b[['f1', 'f2']][0].tolist(), (2, 3))
+ assert_equal(b[['f2', 'f1']][0].tolist(), (3, 2))
+ assert_equal(b[['f1', 'f3']][0].tolist(), (2, (1,)))
+
# non-ascii unicode field indexing is well behaved
if not is_py3:
raise SkipTest('non ascii unicode field indexing skipped; '
@@ -4523,54 +4556,6 @@ class TestRecord(object):
assert_raises(ValueError, a.__setitem__, u'\u03e0', 1)
assert_raises(ValueError, a.__getitem__, u'\u03e0')
- def test_field_names_deprecation(self):
-
- def collect_warnings(f, *args, **kwargs):
- with warnings.catch_warnings(record=True) as log:
- warnings.simplefilter("always")
- f(*args, **kwargs)
- return [w.category for w in log]
-
- a = np.zeros((1,), dtype=[('f1', 'i4'),
- ('f2', 'i4'),
- ('f3', [('sf1', 'i4')])])
- a['f1'][0] = 1
- a['f2'][0] = 2
- a['f3'][0] = (3,)
- b = np.zeros((1,), dtype=[('f1', 'i4'),
- ('f2', 'i4'),
- ('f3', [('sf1', 'i4')])])
- b['f1'][0] = 1
- b['f2'][0] = 2
- b['f3'][0] = (3,)
-
- # All the different functions raise a warning, but not an error
- assert_equal(collect_warnings(a[['f1', 'f2']].__setitem__, 0, (10, 20)),
- [FutureWarning])
- # For <=1.12 a is not modified, but it will be in 1.13
- assert_equal(a, b)
-
- # Views also warn
- subset = a[['f1', 'f2']]
- subset_view = subset.view()
- assert_equal(collect_warnings(subset_view['f1'].__setitem__, 0, 10),
- [FutureWarning])
- # But the write goes through:
- assert_equal(subset['f1'][0], 10)
- # Only one warning per multiple field indexing, though (even if there
- # are multiple views involved):
- assert_equal(collect_warnings(subset['f1'].__setitem__, 0, 10), [])
-
- # make sure views of a multi-field index warn too
- c = np.zeros(3, dtype='i8,i8,i8')
- assert_equal(collect_warnings(c[['f0', 'f2']].view, 'i8,i8'),
- [FutureWarning])
-
- # make sure assignment using a different dtype warns
- a = np.zeros(2, dtype=[('a', 'i4'), ('b', 'i4')])
- b = np.zeros(2, dtype=[('b', 'i4'), ('a', 'i4')])
- assert_equal(collect_warnings(a.__setitem__, (), b), [FutureWarning])
-
def test_record_hash(self):
a = np.array([(1, 2), (1, 2)], dtype='i1,i2')
a.flags.writeable = False
diff --git a/numpy/core/tests/test_nditer.py b/numpy/core/tests/test_nditer.py
index 885dcb56f..59e11f22e 100644
--- a/numpy/core/tests/test_nditer.py
+++ b/numpy/core/tests/test_nditer.py
@@ -1816,100 +1816,45 @@ def test_iter_buffered_cast_structured_type():
if HAS_REFCOUNT:
assert_equal(sys.getrefcount(a[0]), rc)
- # struct type -> simple (takes the first value)
- sdt = [('a', 'f4'), ('b', 'i8'), ('d', 'O')]
- a = np.array([(5.5, 7, 'test'), (8, 10, 11)], dtype=sdt)
+ # single-field struct type -> simple
+ sdt = [('a', 'f4')]
+ a = np.array([(5.5,), (8,)], dtype=sdt)
i = nditer(a, ['buffered', 'refs_ok'], ['readonly'],
casting='unsafe',
op_dtypes='i4')
assert_equal([x_[()] for x_ in i], [5, 8])
+ # make sure multi-field struct type -> simple doesn't work
+ sdt = [('a', 'f4'), ('b', 'i8'), ('d', 'O')]
+ a = np.array([(5.5, 7, 'test'), (8, 10, 11)], dtype=sdt)
+ assert_raises(ValueError, lambda: (
+ nditer(a, ['buffered', 'refs_ok'], ['readonly'],
+ casting='unsafe',
+ op_dtypes='i4')))
+
# struct type -> struct type (field-wise copy)
sdt1 = [('a', 'f4'), ('b', 'i8'), ('d', 'O')]
sdt2 = [('d', 'u2'), ('a', 'O'), ('b', 'f8')]
a = np.array([(1, 2, 3), (4, 5, 6)], dtype=sdt1)
- # New in 1.12: This behavior changes in 1.13, test for dep warning
- with assert_warns(FutureWarning):
- i = nditer(a, ['buffered', 'refs_ok'], ['readonly'],
- casting='unsafe',
- op_dtypes=sdt2)
+ i = nditer(a, ['buffered', 'refs_ok'], ['readonly'],
+ casting='unsafe',
+ op_dtypes=sdt2)
assert_equal(i[0].dtype, np.dtype(sdt2))
assert_equal([np.array(x_) for x_ in i],
- [np.array((3, 1, 2), dtype=sdt2),
- np.array((6, 4, 5), dtype=sdt2)])
+ [np.array((1, 2, 3), dtype=sdt2),
+ np.array((4, 5, 6), dtype=sdt2)])
- # struct type -> struct type (field gets discarded)
+ # make sure struct type -> struct type with different
+ # number of fields fails
sdt1 = [('a', 'f4'), ('b', 'i8'), ('d', 'O')]
sdt2 = [('b', 'O'), ('a', 'f8')]
a = np.array([(1, 2, 3), (4, 5, 6)], dtype=sdt1)
- # New in 1.12: This behavior changes in 1.13, test for dep warning
- with assert_warns(FutureWarning):
- i = nditer(a, ['buffered', 'refs_ok'], ['readwrite'],
- casting='unsafe',
- op_dtypes=sdt2)
- assert_equal(i[0].dtype, np.dtype(sdt2))
- vals = []
- for x in i:
- vals.append(np.array(x))
- x['a'] = x['b']+3
- assert_equal(vals, [np.array((2, 1), dtype=sdt2),
- np.array((5, 4), dtype=sdt2)])
- assert_equal(a, np.array([(5, 2, None), (8, 5, None)], dtype=sdt1))
-
- # struct type -> struct type (structured field gets discarded)
- sdt1 = [('a', 'f4'), ('b', 'i8'), ('d', [('a', 'i2'), ('b', 'i4')])]
- sdt2 = [('b', 'O'), ('a', 'f8')]
- a = np.array([(1, 2, (0, 9)), (4, 5, (20, 21))], dtype=sdt1)
- # New in 1.12: This behavior changes in 1.13, test for dep warning
- with assert_warns(FutureWarning):
- i = nditer(a, ['buffered', 'refs_ok'], ['readwrite'],
- casting='unsafe',
- op_dtypes=sdt2)
- assert_equal(i[0].dtype, np.dtype(sdt2))
- vals = []
- for x in i:
- vals.append(np.array(x))
- x['a'] = x['b']+3
- assert_equal(vals, [np.array((2, 1), dtype=sdt2),
- np.array((5, 4), dtype=sdt2)])
- assert_equal(a, np.array([(5, 2, (0, 0)), (8, 5, (0, 0))], dtype=sdt1))
-
- # struct type -> struct type (structured field w/ ref gets discarded)
- sdt1 = [('a', 'f4'), ('b', 'i8'), ('d', [('a', 'i2'), ('b', 'O')])]
- sdt2 = [('b', 'O'), ('a', 'f8')]
- a = np.array([(1, 2, (0, 9)), (4, 5, (20, 21))], dtype=sdt1)
- # New in 1.12: This behavior changes in 1.13, test for dep warning
- with assert_warns(FutureWarning):
- i = nditer(a, ['buffered', 'refs_ok'], ['readwrite'],
- casting='unsafe',
- op_dtypes=sdt2)
- assert_equal(i[0].dtype, np.dtype(sdt2))
- vals = []
- for x in i:
- vals.append(np.array(x))
- x['a'] = x['b']+3
- assert_equal(vals, [np.array((2, 1), dtype=sdt2),
- np.array((5, 4), dtype=sdt2)])
- assert_equal(a, np.array([(5, 2, (0, None)), (8, 5, (0, None))], dtype=sdt1))
-
- # struct type -> struct type back (structured field w/ ref gets discarded)
- sdt1 = [('b', 'O'), ('a', 'f8')]
- sdt2 = [('a', 'f4'), ('b', 'i8'), ('d', [('a', 'i2'), ('b', 'O')])]
- a = np.array([(1, 2), (4, 5)], dtype=sdt1)
- # New in 1.12: This behavior changes in 1.13, test for dep warning
- with assert_warns(FutureWarning):
- i = nditer(a, ['buffered', 'refs_ok'], ['readwrite'],
- casting='unsafe',
- op_dtypes=sdt2)
- assert_equal(i[0].dtype, np.dtype(sdt2))
- vals = []
- for x in i:
- vals.append(np.array(x))
- assert_equal(x['d'], np.array((0, None), dtype=[('a', 'i2'), ('b', 'O')]))
- x['a'] = x['b']+3
- assert_equal(vals, [np.array((2, 1, (0, None)), dtype=sdt2),
- np.array((5, 4, (0, None)), dtype=sdt2)])
- assert_equal(a, np.array([(1, 4), (4, 7)], dtype=sdt1))
+
+ assert_raises(ValueError, lambda : (
+ nditer(a, ['buffered', 'refs_ok'], ['readwrite'],
+ casting='unsafe',
+ op_dtypes=sdt2)))
+
def test_iter_buffered_cast_subarray():
# Tests buffering of subarrays
diff --git a/numpy/core/tests/test_records.py b/numpy/core/tests/test_records.py
index d7714132b..27d35fa65 100644
--- a/numpy/core/tests/test_records.py
+++ b/numpy/core/tests/test_records.py
@@ -153,11 +153,6 @@ class TestFromrecords(object):
assert_equal(r['c'].dtype.type, np.record)
assert_equal(type(r['c']), np.recarray)
- # suppress deprecation warning in 1.12 (remove in 1.13)
- with assert_warns(FutureWarning):
- assert_equal(r[['a', 'b']].dtype.type, np.record)
- assert_equal(type(r[['a', 'b']]), np.recarray)
-
#and that it preserves subclasses (gh-6949)
class C(np.recarray):
pass
@@ -334,15 +329,6 @@ class TestRecord(object):
with assert_raises(ValueError):
r.setfield([2,3], *r.dtype.fields['f'])
- def test_out_of_order_fields(self):
- """Ticket #1431."""
- # this test will be invalid in 1.13
- # suppress deprecation warning in 1.12 (remove in 1.13)
- with assert_warns(FutureWarning):
- x = self.data[['col1', 'col2']]
- y = self.data[['col2', 'col1']]
- assert_equal(x[0][0], y[0][1])
-
def test_pickle_1(self):
# Issue #1529
a = np.array([(1, [])], dtype=[('a', np.int32), ('b', np.int32, 0)])
@@ -371,8 +357,7 @@ class TestRecord(object):
# https://github.com/numpy/numpy/issues/3256
ra = np.recarray((2,), dtype=[('x', object), ('y', float), ('z', int)])
- with assert_warns(FutureWarning):
- ra[['x','y']] # TypeError?
+ ra[['x','y']] # TypeError?
def test_record_scalar_setitem(self):
# https://github.com/numpy/numpy/issues/3561
diff --git a/numpy/lib/tests/test_io.py b/numpy/lib/tests/test_io.py
index f2fd37230..e6b1eac7a 100644
--- a/numpy/lib/tests/test_io.py
+++ b/numpy/lib/tests/test_io.py
@@ -1178,12 +1178,12 @@ M 33 21.99
conv = {0: int, 1: int, 2: int, 3: lambda r: dmap[r.decode()]}
test = np.recfromcsv(TextIO(dstr,), dtype=dtyp, delimiter=',',
names=None, converters=conv)
- control = np.rec.array([[1,5,-1,0], [2,8,-1,1], [3,3,-2,3]], dtype=dtyp)
+ control = np.rec.array([(1,5,-1,0), (2,8,-1,1), (3,3,-2,3)], dtype=dtyp)
assert_equal(test, control)
dtyp = [('e1','i4'),('e2','i4'),('n', 'i1')]
test = np.recfromcsv(TextIO(dstr,), dtype=dtyp, delimiter=',',
usecols=(0,1,3), names=None, converters=conv)
- control = np.rec.array([[1,5,0], [2,8,1], [3,3,3]], dtype=dtyp)
+ control = np.rec.array([(1,5,0), (2,8,1), (3,3,3)], dtype=dtyp)
assert_equal(test, control)
def test_dtype_with_object(self):
diff --git a/numpy/ma/core.py b/numpy/ma/core.py
index 8efe45eed..78e7b27a7 100644
--- a/numpy/ma/core.py
+++ b/numpy/ma/core.py
@@ -1613,6 +1613,11 @@ def make_mask(m, copy=False, shrink=True, dtype=MaskType):
# Make sure the input dtype is valid.
dtype = make_mask_descr(dtype)
+
+ # legacy boolean special case: "existence of fields implies true"
+ if isinstance(m, ndarray) and m.dtype.fields and dtype == np.bool_:
+ return np.ones(m.shape, dtype=dtype)
+
# Fill the mask in case there are missing data; turn it into an ndarray.
result = np.array(filled(m, True), copy=copy, dtype=dtype, subok=True)
# Bas les masques !
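The special case above restores the old "existence of fields implies True" rule when a structured mask is collapsed to a plain boolean mask. Roughly (illustrative, using an arbitrary structured dtype):

    import numpy as np
    import numpy.ma as ma

    m = np.zeros(3, dtype=[('a', bool), ('b', bool)])
    print(ma.make_mask(m))   # expected: [ True  True  True]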
diff --git a/numpy/ma/tests/test_core.py b/numpy/ma/tests/test_core.py
index 5da19f9ca..fa9f97233 100644
--- a/numpy/ma/tests/test_core.py
+++ b/numpy/ma/tests/test_core.py
@@ -1760,15 +1760,11 @@ class TestFillingValues(object):
assert_equal(fval.item(), [-999, -12345678.9, b"???"])
#.....Using a flexible type w/ a different type shouldn't matter
- # BEHAVIOR in 1.5 and earlier: match structured types by position
- #fill_val = np.array((-999, -12345678.9, "???"),
- # dtype=[("A", int), ("B", float), ("C", "|S3")])
- # BEHAVIOR in 1.6 and later: match structured types by name
- fill_val = np.array(("???", -999, -12345678.9),
- dtype=[("c", "|S3"), ("a", int), ("b", float), ])
- # suppress deprecation warning in 1.12 (remove in 1.13)
- with assert_warns(FutureWarning):
- fval = _check_fill_value(fill_val, ndtype)
+ # BEHAVIOR in 1.5 and earlier, and 1.13 and later: match structured
+ # types by position
+ fill_val = np.array((-999, -12345678.9, "???"),
+ dtype=[("A", int), ("B", float), ("C", "|S3")])
+ fval = _check_fill_value(fill_val, ndtype)
assert_(isinstance(fval, ndarray))
assert_equal(fval.item(), [-999, -12345678.9, b"???"])