1 /* C Extension module to test all aspects of PEP-3118.
2 Written by Stefan Krah. */
3
4 #include "Python.h"
5
6
7 /* struct module */
8 static PyObject *structmodule = NULL;
9 static PyObject *Struct = NULL;
10 static PyObject *calcsize = NULL;
11
12 /* cache simple format string */
13 static const char *simple_fmt = "B";
14 static PyObject *simple_format = NULL;
15 #define SIMPLE_FORMAT(fmt) (fmt == NULL || strcmp(fmt, "B") == 0)
16 #define FIX_FORMAT(fmt) (fmt == NULL ? "B" : fmt)
17
18
19 /**************************************************************************/
20 /* NDArray Object */
21 /**************************************************************************/
22
23 static PyTypeObject NDArray_Type;
24 #define NDArray_Check(v) Py_IS_TYPE(v, &NDArray_Type)
25
26 #define CHECK_LIST_OR_TUPLE(v) \
27 if (!PyList_Check(v) && !PyTuple_Check(v)) { \
28 PyErr_SetString(PyExc_TypeError, \
29 #v " must be a list or a tuple"); \
30 return NULL; \
31 } \
32
33 #define PyMem_XFree(v) \
34 do { if (v) PyMem_Free(v); } while (0)
35
36 /* Maximum number of dimensions. */
37 #define ND_MAX_NDIM (2 * PyBUF_MAX_NDIM)
38
39 /* Check for the presence of suboffsets in the first dimension. */
40 #define HAVE_PTR(suboffsets) (suboffsets && suboffsets[0] >= 0)
41 /* Adjust ptr if suboffsets are present. */
42 #define ADJUST_PTR(ptr, suboffsets) \
43 (HAVE_PTR(suboffsets) ? *((char**)ptr) + suboffsets[0] : ptr)
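/* Illustrative sketch (editorial, not used by the module): for a PIL-style
   buffer the first dimension is an array of char* pointers, so

       char *row = ADJUST_PTR(ptr, suboffsets);
       // expands to: *((char **)ptr) + suboffsets[0]

   while for suboffsets == NULL (NumPy style) the macro simply yields ptr. */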
44
45 /* Default: NumPy style (strides), read-only, no var-export, C-style layout */
46 #define ND_DEFAULT 0x000
47 /* User configurable flags for the ndarray */
48 #define ND_VAREXPORT 0x001 /* change layout while buffers are exported */
49 /* User configurable flags for each base buffer */
50 #define ND_WRITABLE 0x002 /* mark base buffer as writable */
51 #define ND_FORTRAN 0x004 /* Fortran contiguous layout */
52 #define ND_SCALAR 0x008 /* scalar: ndim = 0 */
53 #define ND_PIL 0x010 /* convert to PIL-style array (suboffsets) */
54 #define ND_REDIRECT 0x020 /* redirect buffer requests */
55 #define ND_GETBUF_FAIL 0x040 /* trigger getbuffer failure */
56 #define ND_GETBUF_UNDEFINED 0x080 /* undefined view.obj */
57 /* Internal flags for the base buffer */
58 #define ND_C 0x100 /* C contiguous layout (default) */
59 #define ND_OWN_ARRAYS 0x200 /* consumer owns arrays */
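/* Editorial example: a combination such as ND_WRITABLE|ND_FORTRAN requests a
   writable base buffer laid out in Fortran order, while ND_VAREXPORT
   additionally permits push()/pop() of bases while buffers are exported. */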
60
61 /* ndarray properties */
62 #define ND_IS_CONSUMER(nd) \
63 (((NDArrayObject *)nd)->head == &((NDArrayObject *)nd)->staticbuf)
64
65 /* ndbuf->flags properties */
66 #define ND_C_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_C)))
67 #define ND_FORTRAN_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_FORTRAN)))
68 #define ND_ANY_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_C|ND_FORTRAN)))
69
70 /* getbuffer() requests */
71 #define REQ_INDIRECT(flags) ((flags&PyBUF_INDIRECT) == PyBUF_INDIRECT)
72 #define REQ_C_CONTIGUOUS(flags) ((flags&PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS)
73 #define REQ_F_CONTIGUOUS(flags) ((flags&PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS)
74 #define REQ_ANY_CONTIGUOUS(flags) ((flags&PyBUF_ANY_CONTIGUOUS) == PyBUF_ANY_CONTIGUOUS)
75 #define REQ_STRIDES(flags) ((flags&PyBUF_STRIDES) == PyBUF_STRIDES)
76 #define REQ_SHAPE(flags) ((flags&PyBUF_ND) == PyBUF_ND)
77 #define REQ_WRITABLE(flags) (flags&PyBUF_WRITABLE)
78 #define REQ_FORMAT(flags) (flags&PyBUF_FORMAT)
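/* Editorial example: PyBUF_FULL_RO is PyBUF_INDIRECT|PyBUF_FORMAT, and
   PyBUF_INDIRECT implies PyBUF_STRIDES and PyBUF_ND, so for such a request
   REQ_INDIRECT(flags), REQ_STRIDES(flags), REQ_SHAPE(flags) and
   REQ_FORMAT(flags) are all true while REQ_WRITABLE(flags) is false. */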
79
80
81 /* Single node of a list of base buffers. The list is needed to implement
82 changes in memory layout while exported buffers are active. */
84
85 struct ndbuf;
86 typedef struct ndbuf {
87 struct ndbuf *next;
88 struct ndbuf *prev;
89 Py_ssize_t len; /* length of data */
90 Py_ssize_t offset; /* start of the array relative to data */
91 char *data; /* raw data */
92 int flags; /* capabilities of the base buffer */
93 Py_ssize_t exports; /* number of exports */
94 Py_buffer base; /* base buffer */
95 } ndbuf_t;
96
97 typedef struct {
98 PyObject_HEAD
99 int flags; /* ndarray flags */
100 ndbuf_t staticbuf; /* static buffer for re-exporting mode */
101 ndbuf_t *head; /* currently active base buffer */
102 } NDArrayObject;
103
104
105 static ndbuf_t *
106 ndbuf_new(Py_ssize_t nitems, Py_ssize_t itemsize, Py_ssize_t offset, int flags)
107 {
108 ndbuf_t *ndbuf;
109 Py_buffer *base;
110 Py_ssize_t len;
111
112 len = nitems * itemsize;
113 if (offset % itemsize) {
114 PyErr_SetString(PyExc_ValueError,
115 "offset must be a multiple of itemsize");
116 return NULL;
117 }
118 if (offset < 0 || offset+itemsize > len) {
119 PyErr_SetString(PyExc_ValueError, "offset out of bounds");
120 return NULL;
121 }
122
123 ndbuf = PyMem_Malloc(sizeof *ndbuf);
124 if (ndbuf == NULL) {
125 PyErr_NoMemory();
126 return NULL;
127 }
128
129 ndbuf->next = NULL;
130 ndbuf->prev = NULL;
131 ndbuf->len = len;
132     ndbuf->offset = offset;
133
134 ndbuf->data = PyMem_Malloc(len);
135 if (ndbuf->data == NULL) {
136 PyErr_NoMemory();
137 PyMem_Free(ndbuf);
138 return NULL;
139 }
140
141 ndbuf->flags = flags;
142 ndbuf->exports = 0;
143
144 base = &ndbuf->base;
145 base->obj = NULL;
146 base->buf = ndbuf->data;
147 base->len = len;
148 base->itemsize = 1;
149 base->readonly = 0;
150 base->format = NULL;
151 base->ndim = 1;
152 base->shape = NULL;
153 base->strides = NULL;
154 base->suboffsets = NULL;
155 base->internal = ndbuf;
156
157 return ndbuf;
158 }
159
160 static void
161 ndbuf_free(ndbuf_t *ndbuf)
162 {
163 Py_buffer *base = &ndbuf->base;
164
165 PyMem_XFree(ndbuf->data);
166 PyMem_XFree(base->format);
167 PyMem_XFree(base->shape);
168 PyMem_XFree(base->strides);
169 PyMem_XFree(base->suboffsets);
170
171 PyMem_Free(ndbuf);
172 }
173
174 static void
175 ndbuf_push(NDArrayObject *nd, ndbuf_t *elt)
176 {
177 elt->next = nd->head;
178 if (nd->head) nd->head->prev = elt;
179 nd->head = elt;
180 elt->prev = NULL;
181 }
182
183 static void
184 ndbuf_delete(NDArrayObject *nd, ndbuf_t *elt)
185 {
186 if (elt->prev)
187 elt->prev->next = elt->next;
188 else
189 nd->head = elt->next;
190
191 if (elt->next)
192 elt->next->prev = elt->prev;
193
194 ndbuf_free(elt);
195 }
196
197 static void
198 ndbuf_pop(NDArrayObject *nd)
199 {
200 ndbuf_delete(nd, nd->head);
201 }
202
203
204 static PyObject *
205 ndarray_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
206 {
207 NDArrayObject *nd;
208
209 nd = PyObject_New(NDArrayObject, &NDArray_Type);
210 if (nd == NULL)
211 return NULL;
212
213 nd->flags = 0;
214 nd->head = NULL;
215 return (PyObject *)nd;
216 }
217
218 static void
219 ndarray_dealloc(NDArrayObject *self)
220 {
221 if (self->head) {
222 if (ND_IS_CONSUMER(self)) {
223 Py_buffer *base = &self->head->base;
224 if (self->head->flags & ND_OWN_ARRAYS) {
225 PyMem_XFree(base->shape);
226 PyMem_XFree(base->strides);
227 PyMem_XFree(base->suboffsets);
228 }
229 PyBuffer_Release(base);
230 }
231 else {
232 while (self->head)
233 ndbuf_pop(self);
234 }
235 }
236 PyObject_Free(self);
237 }
238
239 static int
240 ndarray_init_staticbuf(PyObject *exporter, NDArrayObject *nd, int flags)
241 {
242 Py_buffer *base = &nd->staticbuf.base;
243
244 if (PyObject_GetBuffer(exporter, base, flags) < 0)
245 return -1;
246
247 nd->head = &nd->staticbuf;
248
249 nd->head->next = NULL;
250 nd->head->prev = NULL;
251 nd->head->len = -1;
252 nd->head->offset = -1;
253 nd->head->data = NULL;
254
255 nd->head->flags = base->readonly ? 0 : ND_WRITABLE;
256 nd->head->exports = 0;
257
258 return 0;
259 }
260
261 static void
262 init_flags(ndbuf_t *ndbuf)
263 {
264 if (ndbuf->base.ndim == 0)
265 ndbuf->flags |= ND_SCALAR;
266 if (ndbuf->base.suboffsets)
267 ndbuf->flags |= ND_PIL;
268 if (PyBuffer_IsContiguous(&ndbuf->base, 'C'))
269 ndbuf->flags |= ND_C;
270 if (PyBuffer_IsContiguous(&ndbuf->base, 'F'))
271 ndbuf->flags |= ND_FORTRAN;
272 }
273
274
275 /****************************************************************************/
276 /* Buffer/List conversions */
277 /****************************************************************************/
278
279 static Py_ssize_t *strides_from_shape(const ndbuf_t *, int flags);
280
281 /* Get number of members in a struct: see issue #12740 */
282 typedef struct {
283 PyObject_HEAD
284 Py_ssize_t s_size;
285 Py_ssize_t s_len;
286 } PyPartialStructObject;
287
288 static Py_ssize_t
289 get_nmemb(PyObject *s)
290 {
291 return ((PyPartialStructObject *)s)->s_len;
292 }
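/* Sketch of the assumption behind this trick (editorial): PyPartialStructObject
   mirrors the head of the struct module's PyStructObject, so s_len can be read
   as the number of format codes.  With assumed values:

       PyObject *s = PyObject_CallFunctionObjArgs(Struct, format, NULL);
       // format "BHi" -> get_nmemb(s) == 3, format "B" -> 1

   If _struct.c ever reorders these fields, get_nmemb() breaks. */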
293
294 /* Pack all items into the buffer of 'obj'. The 'format' parameter must be
295 in struct module syntax. For standard C types, a single item is an integer.
296 For compound types, a single item is a tuple of integers. */
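/* Editorial example of matching items/format pairs (assumed values):

       format = "I"     items = [1, 2, 3]             (one member per item)
       format = "ii"    items = [(1, -1), (2, -2)]    (nmemb == 2, tuple per item)

   with itemsize = struct.calcsize(format) in both cases. */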
297 static int
298 pack_from_list(PyObject *obj, PyObject *items, PyObject *format,
299 Py_ssize_t itemsize)
300 {
301 PyObject *structobj, *pack_into;
302 PyObject *args, *offset;
303 PyObject *item, *tmp;
304 Py_ssize_t nitems; /* number of items */
305 Py_ssize_t nmemb; /* number of members in a single item */
306 Py_ssize_t i, j;
307 int ret = 0;
308
309 assert(PyObject_CheckBuffer(obj));
310 assert(PyList_Check(items) || PyTuple_Check(items));
311
312 structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
313 if (structobj == NULL)
314 return -1;
315
316 nitems = PySequence_Fast_GET_SIZE(items);
317 nmemb = get_nmemb(structobj);
318 assert(nmemb >= 1);
319
320 pack_into = PyObject_GetAttrString(structobj, "pack_into");
321 if (pack_into == NULL) {
322 Py_DECREF(structobj);
323 return -1;
324 }
325
326 /* nmemb >= 1 */
327 args = PyTuple_New(2 + nmemb);
328 if (args == NULL) {
329 Py_DECREF(pack_into);
330 Py_DECREF(structobj);
331 return -1;
332 }
333
334 offset = NULL;
335 for (i = 0; i < nitems; i++) {
336 /* Loop invariant: args[j] are borrowed references or NULL. */
337 PyTuple_SET_ITEM(args, 0, obj);
338 for (j = 1; j < 2+nmemb; j++)
339 PyTuple_SET_ITEM(args, j, NULL);
340
341 Py_XDECREF(offset);
342 offset = PyLong_FromSsize_t(i*itemsize);
343 if (offset == NULL) {
344 ret = -1;
345 break;
346 }
347 PyTuple_SET_ITEM(args, 1, offset);
348
349 item = PySequence_Fast_GET_ITEM(items, i);
350 if ((PyBytes_Check(item) || PyLong_Check(item) ||
351 PyFloat_Check(item)) && nmemb == 1) {
352 PyTuple_SET_ITEM(args, 2, item);
353 }
354 else if ((PyList_Check(item) || PyTuple_Check(item)) &&
355 PySequence_Length(item) == nmemb) {
356 for (j = 0; j < nmemb; j++) {
357 tmp = PySequence_Fast_GET_ITEM(item, j);
358 PyTuple_SET_ITEM(args, 2+j, tmp);
359 }
360 }
361 else {
362 PyErr_SetString(PyExc_ValueError,
363 "mismatch between initializer element and format string");
364 ret = -1;
365 break;
366 }
367
368 tmp = PyObject_CallObject(pack_into, args);
369 if (tmp == NULL) {
370 ret = -1;
371 break;
372 }
373 Py_DECREF(tmp);
374 }
375
376 Py_INCREF(obj); /* args[0] */
377 /* args[1]: offset is either NULL or should be dealloc'd */
378 for (i = 2; i < 2+nmemb; i++) {
379 tmp = PyTuple_GET_ITEM(args, i);
380 Py_XINCREF(tmp);
381 }
382 Py_DECREF(args);
383
384 Py_DECREF(pack_into);
385 Py_DECREF(structobj);
386 return ret;
387
388 }
389
390 /* Pack single element */
391 static int
392 pack_single(char *ptr, PyObject *item, const char *fmt, Py_ssize_t itemsize)
393 {
394 PyObject *structobj = NULL, *pack_into = NULL, *args = NULL;
395 PyObject *format = NULL, *mview = NULL, *zero = NULL;
396 Py_ssize_t i, nmemb;
397 int ret = -1;
398 PyObject *x;
399
400 if (fmt == NULL) fmt = "B";
401
402 format = PyUnicode_FromString(fmt);
403 if (format == NULL)
404 goto out;
405
406 structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
407 if (structobj == NULL)
408 goto out;
409
410 nmemb = get_nmemb(structobj);
411 assert(nmemb >= 1);
412
413 mview = PyMemoryView_FromMemory(ptr, itemsize, PyBUF_WRITE);
414 if (mview == NULL)
415 goto out;
416
417 zero = PyLong_FromLong(0);
418 if (zero == NULL)
419 goto out;
420
421 pack_into = PyObject_GetAttrString(structobj, "pack_into");
422 if (pack_into == NULL)
423 goto out;
424
425 args = PyTuple_New(2+nmemb);
426 if (args == NULL)
427 goto out;
428
429 PyTuple_SET_ITEM(args, 0, mview);
430 PyTuple_SET_ITEM(args, 1, zero);
431
432 if ((PyBytes_Check(item) || PyLong_Check(item) ||
433 PyFloat_Check(item)) && nmemb == 1) {
434 PyTuple_SET_ITEM(args, 2, item);
435 }
436 else if ((PyList_Check(item) || PyTuple_Check(item)) &&
437 PySequence_Length(item) == nmemb) {
438 for (i = 0; i < nmemb; i++) {
439 x = PySequence_Fast_GET_ITEM(item, i);
440 PyTuple_SET_ITEM(args, 2+i, x);
441 }
442 }
443 else {
444 PyErr_SetString(PyExc_ValueError,
445 "mismatch between initializer element and format string");
446 goto args_out;
447 }
448
449 x = PyObject_CallObject(pack_into, args);
450 if (x != NULL) {
451 Py_DECREF(x);
452 ret = 0;
453 }
454
455
456 args_out:
457 for (i = 0; i < 2+nmemb; i++)
458 Py_XINCREF(PyTuple_GET_ITEM(args, i));
459 Py_XDECREF(args);
460 out:
461 Py_XDECREF(pack_into);
462 Py_XDECREF(zero);
463 Py_XDECREF(mview);
464 Py_XDECREF(structobj);
465 Py_XDECREF(format);
466 return ret;
467 }
468
469 static void
470 copy_rec(const Py_ssize_t *shape, Py_ssize_t ndim, Py_ssize_t itemsize,
471 char *dptr, const Py_ssize_t *dstrides, const Py_ssize_t *dsuboffsets,
472 char *sptr, const Py_ssize_t *sstrides, const Py_ssize_t *ssuboffsets,
473 char *mem)
474 {
475 Py_ssize_t i;
476
477 assert(ndim >= 1);
478
479 if (ndim == 1) {
480 if (!HAVE_PTR(dsuboffsets) && !HAVE_PTR(ssuboffsets) &&
481 dstrides[0] == itemsize && sstrides[0] == itemsize) {
482 memmove(dptr, sptr, shape[0] * itemsize);
483 }
484 else {
485 char *p;
486 assert(mem != NULL);
487 for (i=0, p=mem; i<shape[0]; p+=itemsize, sptr+=sstrides[0], i++) {
488 char *xsptr = ADJUST_PTR(sptr, ssuboffsets);
489 memcpy(p, xsptr, itemsize);
490 }
491 for (i=0, p=mem; i<shape[0]; p+=itemsize, dptr+=dstrides[0], i++) {
492 char *xdptr = ADJUST_PTR(dptr, dsuboffsets);
493 memcpy(xdptr, p, itemsize);
494 }
495 }
496 return;
497 }
498
499 for (i = 0; i < shape[0]; dptr+=dstrides[0], sptr+=sstrides[0], i++) {
500 char *xdptr = ADJUST_PTR(dptr, dsuboffsets);
501 char *xsptr = ADJUST_PTR(sptr, ssuboffsets);
502
503 copy_rec(shape+1, ndim-1, itemsize,
504 xdptr, dstrides+1, dsuboffsets ? dsuboffsets+1 : NULL,
505 xsptr, sstrides+1, ssuboffsets ? ssuboffsets+1 : NULL,
506 mem);
507 }
508 }
509
510 static int
511 cmp_structure(Py_buffer *dest, Py_buffer *src)
512 {
513 Py_ssize_t i;
514
515 if (strcmp(FIX_FORMAT(dest->format), FIX_FORMAT(src->format)) != 0 ||
516 dest->itemsize != src->itemsize ||
517 dest->ndim != src->ndim)
518 return -1;
519
520 for (i = 0; i < dest->ndim; i++) {
521 if (dest->shape[i] != src->shape[i])
522 return -1;
523 if (dest->shape[i] == 0)
524 break;
525 }
526
527 return 0;
528 }
529
530 /* Copy src to dest. Both buffers must have the same format, itemsize,
531 ndim and shape. Copying is atomic, the function never fails with
532 a partial copy. */
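/* Editorial note: when the last dimension of either buffer is not packed
   (strides[ndim-1] != itemsize or a suboffset is present), copy_rec() first
   gathers one row into the scratch buffer 'mem' and then scatters it into the
   destination, which keeps row-wise copies well-defined even if source and
   destination memory overlap. */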
533 static int
534 copy_buffer(Py_buffer *dest, Py_buffer *src)
535 {
536 char *mem = NULL;
537
538 assert(dest->ndim > 0);
539
540 if (cmp_structure(dest, src) < 0) {
541 PyErr_SetString(PyExc_ValueError,
542 "ndarray assignment: lvalue and rvalue have different structures");
543 return -1;
544 }
545
546 if ((dest->suboffsets && dest->suboffsets[dest->ndim-1] >= 0) ||
547 (src->suboffsets && src->suboffsets[src->ndim-1] >= 0) ||
548 dest->strides[dest->ndim-1] != dest->itemsize ||
549 src->strides[src->ndim-1] != src->itemsize) {
550 mem = PyMem_Malloc(dest->shape[dest->ndim-1] * dest->itemsize);
551 if (mem == NULL) {
552 PyErr_NoMemory();
553 return -1;
554 }
555 }
556
557 copy_rec(dest->shape, dest->ndim, dest->itemsize,
558 dest->buf, dest->strides, dest->suboffsets,
559 src->buf, src->strides, src->suboffsets,
560 mem);
561
562 PyMem_XFree(mem);
563 return 0;
564 }
565
566
567 /* Unpack single element */
568 static PyObject *
569 unpack_single(char *ptr, const char *fmt, Py_ssize_t itemsize)
570 {
571 PyObject *x, *unpack_from, *mview;
572
573 if (fmt == NULL) {
574 fmt = "B";
575 itemsize = 1;
576 }
577
578 unpack_from = PyObject_GetAttrString(structmodule, "unpack_from");
579 if (unpack_from == NULL)
580 return NULL;
581
582 mview = PyMemoryView_FromMemory(ptr, itemsize, PyBUF_READ);
583 if (mview == NULL) {
584 Py_DECREF(unpack_from);
585 return NULL;
586 }
587
588 x = PyObject_CallFunction(unpack_from, "sO", fmt, mview);
589 Py_DECREF(unpack_from);
590 Py_DECREF(mview);
591 if (x == NULL)
592 return NULL;
593
594 if (PyTuple_GET_SIZE(x) == 1) {
595 PyObject *tmp = PyTuple_GET_ITEM(x, 0);
596 Py_INCREF(tmp);
597 Py_DECREF(x);
598 return tmp;
599 }
600
601 return x;
602 }
603
604 /* Unpack a multi-dimensional matrix into a nested list. Return a scalar
605 for ndim = 0. */
606 static PyObject *
607 unpack_rec(PyObject *unpack_from, char *ptr, PyObject *mview, char *item,
608 const Py_ssize_t *shape, const Py_ssize_t *strides,
609 const Py_ssize_t *suboffsets, Py_ssize_t ndim, Py_ssize_t itemsize)
610 {
611 PyObject *lst, *x;
612 Py_ssize_t i;
613
614 assert(ndim >= 0);
615 assert(shape != NULL);
616 assert(strides != NULL);
617
618 if (ndim == 0) {
619 memcpy(item, ptr, itemsize);
620 x = PyObject_CallFunctionObjArgs(unpack_from, mview, NULL);
621 if (x == NULL)
622 return NULL;
623 if (PyTuple_GET_SIZE(x) == 1) {
624 PyObject *tmp = PyTuple_GET_ITEM(x, 0);
625 Py_INCREF(tmp);
626 Py_DECREF(x);
627 return tmp;
628 }
629 return x;
630 }
631
632 lst = PyList_New(shape[0]);
633 if (lst == NULL)
634 return NULL;
635
636 for (i = 0; i < shape[0]; ptr+=strides[0], i++) {
637 char *nextptr = ADJUST_PTR(ptr, suboffsets);
638
639 x = unpack_rec(unpack_from, nextptr, mview, item,
640 shape+1, strides+1, suboffsets ? suboffsets+1 : NULL,
641 ndim-1, itemsize);
642 if (x == NULL) {
643 Py_DECREF(lst);
644 return NULL;
645 }
646
647 PyList_SET_ITEM(lst, i, x);
648 }
649
650 return lst;
651 }
652
653
654 static PyObject *
655 ndarray_as_list(NDArrayObject *nd)
656 {
657 PyObject *structobj = NULL, *unpack_from = NULL;
658 PyObject *lst = NULL, *mview = NULL;
659 Py_buffer *base = &nd->head->base;
660 Py_ssize_t *shape = base->shape;
661 Py_ssize_t *strides = base->strides;
662 Py_ssize_t simple_shape[1];
663 Py_ssize_t simple_strides[1];
664 char *item = NULL;
665 PyObject *format;
666 char *fmt = base->format;
667
668 base = &nd->head->base;
669
670 if (fmt == NULL) {
671 PyErr_SetString(PyExc_ValueError,
672 "ndarray: tolist() does not support format=NULL, use "
673 "tobytes()");
674 return NULL;
675 }
676 if (shape == NULL) {
677 assert(ND_C_CONTIGUOUS(nd->head->flags));
678 assert(base->strides == NULL);
679 assert(base->ndim <= 1);
680 shape = simple_shape;
681 shape[0] = base->len;
682 strides = simple_strides;
683 strides[0] = base->itemsize;
684 }
685 else if (strides == NULL) {
686 assert(ND_C_CONTIGUOUS(nd->head->flags));
687 strides = strides_from_shape(nd->head, 0);
688 if (strides == NULL)
689 return NULL;
690 }
691
692 format = PyUnicode_FromString(fmt);
693 if (format == NULL)
694 goto out;
695
696 structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
697 Py_DECREF(format);
698 if (structobj == NULL)
699 goto out;
700
701 unpack_from = PyObject_GetAttrString(structobj, "unpack_from");
702 if (unpack_from == NULL)
703 goto out;
704
705 item = PyMem_Malloc(base->itemsize);
706 if (item == NULL) {
707 PyErr_NoMemory();
708 goto out;
709 }
710
711 mview = PyMemoryView_FromMemory(item, base->itemsize, PyBUF_WRITE);
712 if (mview == NULL)
713 goto out;
714
715 lst = unpack_rec(unpack_from, base->buf, mview, item,
716 shape, strides, base->suboffsets,
717 base->ndim, base->itemsize);
718
719 out:
720 Py_XDECREF(mview);
721 PyMem_XFree(item);
722 Py_XDECREF(unpack_from);
723 Py_XDECREF(structobj);
724 if (strides != base->strides && strides != simple_strides)
725 PyMem_XFree(strides);
726
727 return lst;
728 }
729
730
731 /****************************************************************************/
732 /* Initialize ndbuf */
733 /****************************************************************************/
734
735 /*
736 State of a new ndbuf during initialization. 'OK' means that initialization
737 is complete. 'PTR' means that a pointer has been initialized, but the
738 state of the memory is still undefined and ndbuf->offset is disregarded.
739
740 +-----------------+-----------+-------------+----------------+
741 | | ndbuf_new | init_simple | init_structure |
742 +-----------------+-----------+-------------+----------------+
743 | next | OK (NULL) | OK | OK |
744 +-----------------+-----------+-------------+----------------+
745 | prev | OK (NULL) | OK | OK |
746 +-----------------+-----------+-------------+----------------+
747 | len | OK | OK | OK |
748 +-----------------+-----------+-------------+----------------+
749 | offset | OK | OK | OK |
750 +-----------------+-----------+-------------+----------------+
751 | data | PTR | OK | OK |
752 +-----------------+-----------+-------------+----------------+
753 | flags | user | user | OK |
754 +-----------------+-----------+-------------+----------------+
755 | exports | OK (0) | OK | OK |
756 +-----------------+-----------+-------------+----------------+
757 | base.obj | OK (NULL) | OK | OK |
758 +-----------------+-----------+-------------+----------------+
759 | base.buf | PTR | PTR | OK |
760 +-----------------+-----------+-------------+----------------+
761 | base.len | len(data) | len(data) | OK |
762 +-----------------+-----------+-------------+----------------+
763 | base.itemsize | 1 | OK | OK |
764 +-----------------+-----------+-------------+----------------+
765 | base.readonly | 0 | OK | OK |
766 +-----------------+-----------+-------------+----------------+
767 | base.format | NULL | OK | OK |
768 +-----------------+-----------+-------------+----------------+
769 | base.ndim | 1 | 1 | OK |
770 +-----------------+-----------+-------------+----------------+
771 | base.shape | NULL | NULL | OK |
772 +-----------------+-----------+-------------+----------------+
773 | base.strides | NULL | NULL | OK |
774 +-----------------+-----------+-------------+----------------+
775 | base.suboffsets | NULL | NULL | OK |
776 +-----------------+-----------+-------------+----------------+
777 | base.internal | OK | OK | OK |
778 +-----------------+-----------+-------------+----------------+
779
780 */
781
782 static Py_ssize_t
783 get_itemsize(PyObject *format)
784 {
785 PyObject *tmp;
786 Py_ssize_t itemsize;
787
788 tmp = PyObject_CallFunctionObjArgs(calcsize, format, NULL);
789 if (tmp == NULL)
790 return -1;
791 itemsize = PyLong_AsSsize_t(tmp);
792 Py_DECREF(tmp);
793
794 return itemsize;
795 }
796
797 static char *
798 get_format(PyObject *format)
799 {
800 PyObject *tmp;
801 char *fmt;
802
803 tmp = PyUnicode_AsASCIIString(format);
804 if (tmp == NULL)
805 return NULL;
806 fmt = PyMem_Malloc(PyBytes_GET_SIZE(tmp)+1);
807 if (fmt == NULL) {
808 PyErr_NoMemory();
809 Py_DECREF(tmp);
810 return NULL;
811 }
812 strcpy(fmt, PyBytes_AS_STRING(tmp));
813 Py_DECREF(tmp);
814
815 return fmt;
816 }
817
818 static int
819 init_simple(ndbuf_t *ndbuf, PyObject *items, PyObject *format,
820 Py_ssize_t itemsize)
821 {
822 PyObject *mview;
823 Py_buffer *base = &ndbuf->base;
824 int ret;
825
826 mview = PyMemoryView_FromBuffer(base);
827 if (mview == NULL)
828 return -1;
829
830 ret = pack_from_list(mview, items, format, itemsize);
831 Py_DECREF(mview);
832 if (ret < 0)
833 return -1;
834
835 base->readonly = !(ndbuf->flags & ND_WRITABLE);
836 base->itemsize = itemsize;
837 base->format = get_format(format);
838 if (base->format == NULL)
839 return -1;
840
841 return 0;
842 }
843
844 static Py_ssize_t *
845 seq_as_ssize_array(PyObject *seq, Py_ssize_t len, int is_shape)
846 {
847 Py_ssize_t *dest;
848 Py_ssize_t x, i;
849
850 /* ndim = len <= ND_MAX_NDIM, so PyMem_New() is actually not needed. */
851 dest = PyMem_New(Py_ssize_t, len);
852 if (dest == NULL) {
853 PyErr_NoMemory();
854 return NULL;
855 }
856
857 for (i = 0; i < len; i++) {
858 PyObject *tmp = PySequence_Fast_GET_ITEM(seq, i);
859 if (!PyLong_Check(tmp)) {
860 PyErr_Format(PyExc_ValueError,
861 "elements of %s must be integers",
862 is_shape ? "shape" : "strides");
863 PyMem_Free(dest);
864 return NULL;
865 }
866 x = PyLong_AsSsize_t(tmp);
867 if (PyErr_Occurred()) {
868 PyMem_Free(dest);
869 return NULL;
870 }
871 if (is_shape && x < 0) {
872 PyErr_Format(PyExc_ValueError,
873 "elements of shape must be integers >= 0");
874 PyMem_Free(dest);
875 return NULL;
876 }
877 dest[i] = x;
878 }
879
880 return dest;
881 }
882
883 static Py_ssize_t *
884 strides_from_shape(const ndbuf_t *ndbuf, int flags)
885 {
886 const Py_buffer *base = &ndbuf->base;
887 Py_ssize_t *s, i;
888
889 s = PyMem_Malloc(base->ndim * (sizeof *s));
890 if (s == NULL) {
891 PyErr_NoMemory();
892 return NULL;
893 }
894
895 if (flags & ND_FORTRAN) {
896 s[0] = base->itemsize;
897 for (i = 1; i < base->ndim; i++)
898 s[i] = s[i-1] * base->shape[i-1];
899 }
900 else {
901 s[base->ndim-1] = base->itemsize;
902 for (i = base->ndim-2; i >= 0; i--)
903 s[i] = s[i+1] * base->shape[i+1];
904 }
905
906 return s;
907 }
908
909 /* Bounds check:
910
911 len := complete length of allocated memory
912 offset := start of the array
913
914 A single array element is indexed by:
915
916 i = indices[0] * strides[0] + indices[1] * strides[1] + ...
917
918 imin is reached when all indices[n] combined with positive strides are 0
919 and all indices combined with negative strides are shape[n]-1, which is
920 the maximum index for the nth dimension.
921
922 imax is reached when all indices[n] combined with negative strides are 0
923 and all indices combined with positive strides are shape[n]-1.
924 */
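/* Editorial worked example (assumed values): len = 12, itemsize = 1,
   offset = 6, shape = {2, 3}, strides = {-3, 1} give

       imin = (2-1) * -3 = -3        (negative stride)
       imax = (3-1) *  1 =  2        (positive stride)

   and the structure is accepted because imin + offset = 3 >= 0 and
   imax + offset + itemsize = 9 <= len. */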
925 static int
926 verify_structure(Py_ssize_t len, Py_ssize_t itemsize, Py_ssize_t offset,
927 const Py_ssize_t *shape, const Py_ssize_t *strides,
928 Py_ssize_t ndim)
929 {
930 Py_ssize_t imin, imax;
931 Py_ssize_t n;
932
933 assert(ndim >= 0);
934
935 if (ndim == 0 && (offset < 0 || offset+itemsize > len))
936 goto invalid_combination;
937
938 for (n = 0; n < ndim; n++)
939 if (strides[n] % itemsize) {
940 PyErr_SetString(PyExc_ValueError,
941 "strides must be a multiple of itemsize");
942 return -1;
943 }
944
945 for (n = 0; n < ndim; n++)
946 if (shape[n] == 0)
947 return 0;
948
949 imin = imax = 0;
950 for (n = 0; n < ndim; n++)
951 if (strides[n] <= 0)
952 imin += (shape[n]-1) * strides[n];
953 else
954 imax += (shape[n]-1) * strides[n];
955
956 if (imin + offset < 0 || imax + offset + itemsize > len)
957 goto invalid_combination;
958
959 return 0;
960
961
962 invalid_combination:
963 PyErr_SetString(PyExc_ValueError,
964 "invalid combination of buffer, shape and strides");
965 return -1;
966 }
967
968 /*
969 Convert a NumPy-style array to an array using suboffsets to stride in
970 the first dimension. Requirements: ndim > 0.
971
972 Contiguous example
973 ==================
974
975 Input:
976 ------
977 shape = {2, 2, 3};
978 strides = {6, 3, 1};
979 suboffsets = NULL;
980 data = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
981 buf = &data[0]
982
983 Output:
984 -------
985 shape = {2, 2, 3};
986 strides = {sizeof(char *), 3, 1};
987 suboffsets = {0, -1, -1};
988 data = {p1, p2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
989 | | ^ ^
990 `---'---' |
991 | |
992 `---------------------'
993 buf = &data[0]
994
995 So, in the example the input resembles the three-dimensional array
996 char v[2][2][3], while the output resembles an array of two pointers
997 to two-dimensional arrays: char (*v[2])[2][3].
998
999
1000 Non-contiguous example:
1001 =======================
1002
1003 Input (with offset and negative strides):
1004 -----------------------------------------
1005 shape = {2, 2, 3};
1006 strides = {-6, 3, -1};
1007 offset = 8
1008 suboffsets = NULL;
1009 data = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1010
1011 Output:
1012 -------
1013 shape = {2, 2, 3};
1014 strides = {-sizeof(char *), 3, -1};
1015 suboffsets = {2, -1, -1};
1016 newdata = {p1, p2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
1017 | | ^ ^ ^ ^
1018 `---'---' | | `- p2+suboffsets[0]
1019 | `-----------|--- p1+suboffsets[0]
1020 `---------------------'
1021 buf = &newdata[1] # striding backwards over the pointers.
1022
1023 suboffsets[0] is the same as the offset that one would specify if
1024 the two {2, 3} subarrays were created directly, hence the name.
1025 */
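/* Editorial sketch: after the conversion, element v[i][j][k] of the contiguous
   example above is reached as

       char *row = *(char **)((char *)buf + i * strides[0]);
       char *elt = row + suboffsets[0] + j * strides[1] + k * strides[2];

   which is what ADJUST_PTR() performs for the first dimension. */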
1026 static int
1027 init_suboffsets(ndbuf_t *ndbuf)
1028 {
1029 Py_buffer *base = &ndbuf->base;
1030 Py_ssize_t start, step;
1031 Py_ssize_t imin, suboffset0;
1032 Py_ssize_t addsize;
1033 Py_ssize_t n;
1034 char *data;
1035
1036 assert(base->ndim > 0);
1037 assert(base->suboffsets == NULL);
1038
1039 /* Allocate new data with additional space for shape[0] pointers. */
1040 addsize = base->shape[0] * (sizeof (char *));
1041
1042 /* Align array start to a multiple of 8. */
1043 addsize = 8 * ((addsize + 7) / 8);
1044
1045 data = PyMem_Malloc(ndbuf->len + addsize);
1046 if (data == NULL) {
1047 PyErr_NoMemory();
1048 return -1;
1049 }
1050
1051 memcpy(data + addsize, ndbuf->data, ndbuf->len);
1052
1053 PyMem_Free(ndbuf->data);
1054 ndbuf->data = data;
1055 ndbuf->len += addsize;
1056 base->buf = ndbuf->data;
1057
1058 /* imin: minimum index of the input array relative to ndbuf->offset.
1059 suboffset0: offset for each sub-array of the output. This is the
1060 same as calculating -imin' for a sub-array of ndim-1. */
1061 imin = suboffset0 = 0;
1062 for (n = 0; n < base->ndim; n++) {
1063 if (base->shape[n] == 0)
1064 break;
1065 if (base->strides[n] <= 0) {
1066 Py_ssize_t x = (base->shape[n]-1) * base->strides[n];
1067 imin += x;
1068 suboffset0 += (n >= 1) ? -x : 0;
1069 }
1070 }
1071
1072 /* Initialize the array of pointers to the sub-arrays. */
1073 start = addsize + ndbuf->offset + imin;
1074 step = base->strides[0] < 0 ? -base->strides[0] : base->strides[0];
1075
1076 for (n = 0; n < base->shape[0]; n++)
1077 ((char **)base->buf)[n] = (char *)base->buf + start + n*step;
1078
1079 /* Initialize suboffsets. */
1080 base->suboffsets = PyMem_Malloc(base->ndim * (sizeof *base->suboffsets));
1081 if (base->suboffsets == NULL) {
1082 PyErr_NoMemory();
1083 return -1;
1084 }
1085 base->suboffsets[0] = suboffset0;
1086 for (n = 1; n < base->ndim; n++)
1087 base->suboffsets[n] = -1;
1088
1089 /* Adjust strides for the first (zeroth) dimension. */
1090 if (base->strides[0] >= 0) {
1091 base->strides[0] = sizeof(char *);
1092 }
1093 else {
1094 /* Striding backwards. */
1095 base->strides[0] = -(Py_ssize_t)sizeof(char *);
1096 if (base->shape[0] > 0)
1097 base->buf = (char *)base->buf + (base->shape[0]-1) * sizeof(char *);
1098 }
1099
1100 ndbuf->flags &= ~(ND_C|ND_FORTRAN);
1101 ndbuf->offset = 0;
1102 return 0;
1103 }
1104
1105 static void
1106 init_len(Py_buffer *base)
1107 {
1108 Py_ssize_t i;
1109
1110 base->len = 1;
1111 for (i = 0; i < base->ndim; i++)
1112 base->len *= base->shape[i];
1113 base->len *= base->itemsize;
1114 }
1115
1116 static int
1117 init_structure(ndbuf_t *ndbuf, PyObject *shape, PyObject *strides,
1118 Py_ssize_t ndim)
1119 {
1120 Py_buffer *base = &ndbuf->base;
1121
1122 base->ndim = (int)ndim;
1123 if (ndim == 0) {
1124 if (ndbuf->flags & ND_PIL) {
1125 PyErr_SetString(PyExc_TypeError,
1126 "ndim = 0 cannot be used in conjunction with ND_PIL");
1127 return -1;
1128 }
1129 ndbuf->flags |= (ND_SCALAR|ND_C|ND_FORTRAN);
1130 return 0;
1131 }
1132
1133 /* shape */
1134 base->shape = seq_as_ssize_array(shape, ndim, 1);
1135 if (base->shape == NULL)
1136 return -1;
1137
1138 /* strides */
1139 if (strides) {
1140 base->strides = seq_as_ssize_array(strides, ndim, 0);
1141 }
1142 else {
1143 base->strides = strides_from_shape(ndbuf, ndbuf->flags);
1144 }
1145 if (base->strides == NULL)
1146 return -1;
1147 if (verify_structure(base->len, base->itemsize, ndbuf->offset,
1148 base->shape, base->strides, ndim) < 0)
1149 return -1;
1150
1151 /* buf */
1152 base->buf = ndbuf->data + ndbuf->offset;
1153
1154 /* len */
1155 init_len(base);
1156
1157 /* ndbuf->flags */
1158 if (PyBuffer_IsContiguous(base, 'C'))
1159 ndbuf->flags |= ND_C;
1160 if (PyBuffer_IsContiguous(base, 'F'))
1161 ndbuf->flags |= ND_FORTRAN;
1162
1163
1164 /* convert numpy array to suboffset representation */
1165 if (ndbuf->flags & ND_PIL) {
1166        /* modifies base->buf, base->strides and base->suboffsets */
1167 return init_suboffsets(ndbuf);
1168 }
1169
1170 return 0;
1171 }
1172
1173 static ndbuf_t *
1174 init_ndbuf(PyObject *items, PyObject *shape, PyObject *strides,
1175 Py_ssize_t offset, PyObject *format, int flags)
1176 {
1177 ndbuf_t *ndbuf;
1178 Py_ssize_t ndim;
1179 Py_ssize_t nitems;
1180 Py_ssize_t itemsize;
1181
1182 /* ndim = len(shape) */
1183 CHECK_LIST_OR_TUPLE(shape)
1184 ndim = PySequence_Fast_GET_SIZE(shape);
1185 if (ndim > ND_MAX_NDIM) {
1186 PyErr_Format(PyExc_ValueError,
1187 "ndim must not exceed %d", ND_MAX_NDIM);
1188 return NULL;
1189 }
1190
1191 /* len(strides) = len(shape) */
1192 if (strides) {
1193 CHECK_LIST_OR_TUPLE(strides)
1194 if (PySequence_Fast_GET_SIZE(strides) == 0)
1195 strides = NULL;
1196 else if (flags & ND_FORTRAN) {
1197 PyErr_SetString(PyExc_TypeError,
1198 "ND_FORTRAN cannot be used together with strides");
1199 return NULL;
1200 }
1201 else if (PySequence_Fast_GET_SIZE(strides) != ndim) {
1202 PyErr_SetString(PyExc_ValueError,
1203 "len(shape) != len(strides)");
1204 return NULL;
1205 }
1206 }
1207
1208 /* itemsize */
1209 itemsize = get_itemsize(format);
1210 if (itemsize <= 0) {
1211 if (itemsize == 0) {
1212 PyErr_SetString(PyExc_ValueError,
1213 "itemsize must not be zero");
1214 }
1215 return NULL;
1216 }
1217
1218 /* convert scalar to list */
1219 if (ndim == 0) {
1220 items = PyTuple_Pack(1, items);
1221 if (items == NULL)
1222 return NULL;
1223 }
1224 else {
1225 CHECK_LIST_OR_TUPLE(items)
1226 Py_INCREF(items);
1227 }
1228
1229 /* number of items */
1230 nitems = PySequence_Fast_GET_SIZE(items);
1231 if (nitems == 0) {
1232 PyErr_SetString(PyExc_ValueError,
1233 "initializer list or tuple must not be empty");
1234 Py_DECREF(items);
1235 return NULL;
1236 }
1237
1238 ndbuf = ndbuf_new(nitems, itemsize, offset, flags);
1239 if (ndbuf == NULL) {
1240 Py_DECREF(items);
1241 return NULL;
1242 }
1243
1244
1245 if (init_simple(ndbuf, items, format, itemsize) < 0)
1246 goto error;
1247 if (init_structure(ndbuf, shape, strides, ndim) < 0)
1248 goto error;
1249
1250 Py_DECREF(items);
1251 return ndbuf;
1252
1253 error:
1254 Py_DECREF(items);
1255 ndbuf_free(ndbuf);
1256 return NULL;
1257 }
1258
1259 /* initialize and push a new base onto the linked list */
1260 static int
1261 ndarray_push_base(NDArrayObject *nd, PyObject *items,
1262 PyObject *shape, PyObject *strides,
1263 Py_ssize_t offset, PyObject *format, int flags)
1264 {
1265 ndbuf_t *ndbuf;
1266
1267 ndbuf = init_ndbuf(items, shape, strides, offset, format, flags);
1268 if (ndbuf == NULL)
1269 return -1;
1270
1271 ndbuf_push(nd, ndbuf);
1272 return 0;
1273 }
1274
1275 #define PyBUF_UNUSED 0x10000
1276 static int
1277 ndarray_init(PyObject *self, PyObject *args, PyObject *kwds)
1278 {
1279 NDArrayObject *nd = (NDArrayObject *)self;
1280 static char *kwlist[] = {
1281 "obj", "shape", "strides", "offset", "format", "flags", "getbuf", NULL
1282 };
1283 PyObject *v = NULL; /* initializer: scalar, list, tuple or base object */
1284 PyObject *shape = NULL; /* size of each dimension */
1285 PyObject *strides = NULL; /* number of bytes to the next elt in each dim */
1286 Py_ssize_t offset = 0; /* buffer offset */
1287 PyObject *format = simple_format; /* struct module specifier: "B" */
1288 int flags = ND_DEFAULT; /* base buffer and ndarray flags */
1289
1290 int getbuf = PyBUF_UNUSED; /* re-exporter: getbuffer request flags */
1291
1292
1293 if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|OOnOii", kwlist,
1294 &v, &shape, &strides, &offset, &format, &flags, &getbuf))
1295 return -1;
1296
1297 /* NDArrayObject is re-exporter */
1298 if (PyObject_CheckBuffer(v) && shape == NULL) {
1299 if (strides || offset || format != simple_format ||
1300 !(flags == ND_DEFAULT || flags == ND_REDIRECT)) {
1301 PyErr_SetString(PyExc_TypeError,
1302 "construction from exporter object only takes 'obj', 'getbuf' "
1303 "and 'flags' arguments");
1304 return -1;
1305 }
1306
1307 getbuf = (getbuf == PyBUF_UNUSED) ? PyBUF_FULL_RO : getbuf;
1308
1309 if (ndarray_init_staticbuf(v, nd, getbuf) < 0)
1310 return -1;
1311
1312 init_flags(nd->head);
1313 nd->head->flags |= flags;
1314
1315 return 0;
1316 }
1317
1318 /* NDArrayObject is the original base object. */
1319 if (getbuf != PyBUF_UNUSED) {
1320 PyErr_SetString(PyExc_TypeError,
1321 "getbuf argument only valid for construction from exporter "
1322 "object");
1323 return -1;
1324 }
1325 if (shape == NULL) {
1326 PyErr_SetString(PyExc_TypeError,
1327 "shape is a required argument when constructing from "
1328 "list, tuple or scalar");
1329 return -1;
1330 }
1331
1332 if (flags & ND_VAREXPORT) {
1333 nd->flags |= ND_VAREXPORT;
1334 flags &= ~ND_VAREXPORT;
1335 }
1336
1337 /* Initialize and push the first base buffer onto the linked list. */
1338 return ndarray_push_base(nd, v, shape, strides, offset, format, flags);
1339 }
1340
1341 /* Push an additional base onto the linked list. */
1342 static PyObject *
1343 ndarray_push(PyObject *self, PyObject *args, PyObject *kwds)
1344 {
1345 NDArrayObject *nd = (NDArrayObject *)self;
1346 static char *kwlist[] = {
1347 "items", "shape", "strides", "offset", "format", "flags", NULL
1348 };
1349 PyObject *items = NULL; /* initializer: scalar, list or tuple */
1350 PyObject *shape = NULL; /* size of each dimension */
1351 PyObject *strides = NULL; /* number of bytes to the next elt in each dim */
1352 PyObject *format = simple_format; /* struct module specifier: "B" */
1353 Py_ssize_t offset = 0; /* buffer offset */
1354 int flags = ND_DEFAULT; /* base buffer flags */
1355
1356 if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO|OnOi", kwlist,
1357 &items, &shape, &strides, &offset, &format, &flags))
1358 return NULL;
1359
1360 if (flags & ND_VAREXPORT) {
1361 PyErr_SetString(PyExc_ValueError,
1362 "ND_VAREXPORT flag can only be used during object creation");
1363 return NULL;
1364 }
1365 if (ND_IS_CONSUMER(nd)) {
1366 PyErr_SetString(PyExc_BufferError,
1367 "structure of re-exporting object is immutable");
1368 return NULL;
1369 }
1370 if (!(nd->flags&ND_VAREXPORT) && nd->head->exports > 0) {
1371 PyErr_Format(PyExc_BufferError,
1372 "cannot change structure: %zd exported buffer%s",
1373 nd->head->exports, nd->head->exports==1 ? "" : "s");
1374 return NULL;
1375 }
1376
1377 if (ndarray_push_base(nd, items, shape, strides,
1378 offset, format, flags) < 0)
1379 return NULL;
1380 Py_RETURN_NONE;
1381 }
1382
1383 /* Pop a base from the linked list (if possible). */
1384 static PyObject *
1385 ndarray_pop(PyObject *self, PyObject *dummy)
1386 {
1387 NDArrayObject *nd = (NDArrayObject *)self;
1388 if (ND_IS_CONSUMER(nd)) {
1389 PyErr_SetString(PyExc_BufferError,
1390 "structure of re-exporting object is immutable");
1391 return NULL;
1392 }
1393 if (nd->head->exports > 0) {
1394 PyErr_Format(PyExc_BufferError,
1395 "cannot change structure: %zd exported buffer%s",
1396 nd->head->exports, nd->head->exports==1 ? "" : "s");
1397 return NULL;
1398 }
1399 if (nd->head->next == NULL) {
1400 PyErr_SetString(PyExc_BufferError,
1401 "list only has a single base");
1402 return NULL;
1403 }
1404
1405 ndbuf_pop(nd);
1406 Py_RETURN_NONE;
1407 }
1408
1409 /**************************************************************************/
1410 /* getbuffer */
1411 /**************************************************************************/
1412
1413 static int
1414 ndarray_getbuf(NDArrayObject *self, Py_buffer *view, int flags)
1415 {
1416 ndbuf_t *ndbuf = self->head;
1417 Py_buffer *base = &ndbuf->base;
1418 int baseflags = ndbuf->flags;
1419
1420 /* redirect mode */
1421 if (base->obj != NULL && (baseflags&ND_REDIRECT)) {
1422 return PyObject_GetBuffer(base->obj, view, flags);
1423 }
1424
1425 /* start with complete information */
1426 *view = *base;
1427 view->obj = NULL;
1428
1429 /* reconstruct format */
1430 if (view->format == NULL)
1431 view->format = "B";
1432
1433 if (base->ndim != 0 &&
1434 ((REQ_SHAPE(flags) && base->shape == NULL) ||
1435 (REQ_STRIDES(flags) && base->strides == NULL))) {
1436 /* The ndarray is a re-exporter that has been created without full
1437 information for testing purposes. In this particular case the
1438 ndarray is not a PEP-3118 compliant buffer provider. */
1439 PyErr_SetString(PyExc_BufferError,
1440 "re-exporter does not provide format, shape or strides");
1441 return -1;
1442 }
1443
1444 if (baseflags & ND_GETBUF_FAIL) {
1445 PyErr_SetString(PyExc_BufferError,
1446 "ND_GETBUF_FAIL: forced test exception");
1447 if (baseflags & ND_GETBUF_UNDEFINED)
1448 view->obj = (PyObject *)0x1; /* wrong but permitted in <= 3.2 */
1449 return -1;
1450 }
1451
1452 if (REQ_WRITABLE(flags) && base->readonly) {
1453 PyErr_SetString(PyExc_BufferError,
1454 "ndarray is not writable");
1455 return -1;
1456 }
1457 if (!REQ_FORMAT(flags)) {
1458 /* NULL indicates that the buffer's data type has been cast to 'B'.
1459 view->itemsize is the _previous_ itemsize. If shape is present,
1460 the equality product(shape) * itemsize = len still holds at this
1461 point. The equality calcsize(format) = itemsize does _not_ hold
1462 from here on! */
1463 view->format = NULL;
1464 }
1465
1466 if (REQ_C_CONTIGUOUS(flags) && !ND_C_CONTIGUOUS(baseflags)) {
1467 PyErr_SetString(PyExc_BufferError,
1468 "ndarray is not C-contiguous");
1469 return -1;
1470 }
1471 if (REQ_F_CONTIGUOUS(flags) && !ND_FORTRAN_CONTIGUOUS(baseflags)) {
1472 PyErr_SetString(PyExc_BufferError,
1473 "ndarray is not Fortran contiguous");
1474 return -1;
1475 }
1476 if (REQ_ANY_CONTIGUOUS(flags) && !ND_ANY_CONTIGUOUS(baseflags)) {
1477 PyErr_SetString(PyExc_BufferError,
1478 "ndarray is not contiguous");
1479 return -1;
1480 }
1481 if (!REQ_INDIRECT(flags) && (baseflags & ND_PIL)) {
1482 PyErr_SetString(PyExc_BufferError,
1483 "ndarray cannot be represented without suboffsets");
1484 return -1;
1485 }
1486 if (!REQ_STRIDES(flags)) {
1487 if (!ND_C_CONTIGUOUS(baseflags)) {
1488 PyErr_SetString(PyExc_BufferError,
1489 "ndarray is not C-contiguous");
1490 return -1;
1491 }
1492 view->strides = NULL;
1493 }
1494 if (!REQ_SHAPE(flags)) {
1495 /* PyBUF_SIMPLE or PyBUF_WRITABLE: at this point buf is C-contiguous,
1496 so base->buf = ndbuf->data. */
1497 if (view->format != NULL) {
1498 /* PyBUF_SIMPLE|PyBUF_FORMAT and PyBUF_WRITABLE|PyBUF_FORMAT do
1499 not make sense. */
1500 PyErr_Format(PyExc_BufferError,
1501 "ndarray: cannot cast to unsigned bytes if the format flag "
1502 "is present");
1503 return -1;
1504 }
1505 /* product(shape) * itemsize = len and calcsize(format) = itemsize
1506 do _not_ hold from here on! */
1507 view->ndim = 1;
1508 view->shape = NULL;
1509 }
1510
1511 /* Ascertain that the new buffer has the same contiguity as the exporter */
1512 if (ND_C_CONTIGUOUS(baseflags) != PyBuffer_IsContiguous(view, 'C') ||
1513 /* skip cast to 1-d */
1514 (view->format != NULL && view->shape != NULL &&
1515 ND_FORTRAN_CONTIGUOUS(baseflags) != PyBuffer_IsContiguous(view, 'F')) ||
1516 /* cast to 1-d */
1517 (view->format == NULL && view->shape == NULL &&
1518 !PyBuffer_IsContiguous(view, 'F'))) {
1519 PyErr_SetString(PyExc_BufferError,
1520 "ndarray: contiguity mismatch in getbuf()");
1521 return -1;
1522 }
1523
1524 view->obj = Py_NewRef(self);
1525 self->head->exports++;
1526
1527 return 0;
1528 }
1529
1530 static void
1531 ndarray_releasebuf(NDArrayObject *self, Py_buffer *view)
1532 {
1533 if (!ND_IS_CONSUMER(self)) {
1534 ndbuf_t *ndbuf = view->internal;
1535 if (--ndbuf->exports == 0 && ndbuf != self->head)
1536 ndbuf_delete(self, ndbuf);
1537 }
1538 }
1539
1540 static PyBufferProcs ndarray_as_buffer = {
1541 (getbufferproc)ndarray_getbuf, /* bf_getbuffer */
1542 (releasebufferproc)ndarray_releasebuf /* bf_releasebuffer */
1543 };
1544
1545
1546 /**************************************************************************/
1547 /* indexing/slicing */
1548 /**************************************************************************/
1549
1550 static char *
1551 ptr_from_index(Py_buffer *base, Py_ssize_t index)
1552 {
1553 char *ptr;
1554 Py_ssize_t nitems; /* items in the first dimension */
1555
1556 if (base->shape)
1557 nitems = base->shape[0];
1558 else {
1559 assert(base->ndim == 1 && SIMPLE_FORMAT(base->format));
1560 nitems = base->len;
1561 }
1562
1563 if (index < 0) {
1564 index += nitems;
1565 }
1566 if (index < 0 || index >= nitems) {
1567 PyErr_SetString(PyExc_IndexError, "index out of bounds");
1568 return NULL;
1569 }
1570
1571 ptr = (char *)base->buf;
1572
1573 if (base->strides == NULL)
1574 ptr += base->itemsize * index;
1575 else
1576 ptr += base->strides[0] * index;
1577
1578 ptr = ADJUST_PTR(ptr, base->suboffsets);
1579
1580 return ptr;
1581 }
1582
1583 static PyObject *
1584 ndarray_item(NDArrayObject *self, Py_ssize_t index)
1585 {
1586 ndbuf_t *ndbuf = self->head;
1587 Py_buffer *base = &ndbuf->base;
1588 char *ptr;
1589
1590 if (base->ndim == 0) {
1591 PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
1592 return NULL;
1593 }
1594
1595 ptr = ptr_from_index(base, index);
1596 if (ptr == NULL)
1597 return NULL;
1598
1599 if (base->ndim == 1) {
1600 return unpack_single(ptr, base->format, base->itemsize);
1601 }
1602 else {
1603 NDArrayObject *nd;
1604 Py_buffer *subview;
1605
1606 nd = (NDArrayObject *)ndarray_new(&NDArray_Type, NULL, NULL);
1607 if (nd == NULL)
1608 return NULL;
1609
1610 if (ndarray_init_staticbuf((PyObject *)self, nd, PyBUF_FULL_RO) < 0) {
1611 Py_DECREF(nd);
1612 return NULL;
1613 }
1614
1615 subview = &nd->staticbuf.base;
1616
1617 subview->buf = ptr;
1618 subview->len /= subview->shape[0];
1619
1620 subview->ndim--;
1621 subview->shape++;
1622 if (subview->strides) subview->strides++;
1623 if (subview->suboffsets) subview->suboffsets++;
1624
1625 init_flags(&nd->staticbuf);
1626
1627 return (PyObject *)nd;
1628 }
1629 }
1630
1631 /*
1632 For each dimension, we get valid (start, stop, step, slicelength) quadruples
1633 from PySlice_GetIndicesEx().
1634
1635 Slicing NumPy arrays
1636 ====================
1637
1638 A pointer to an element in a NumPy array is defined by:
1639
1640 ptr = (char *)buf + indices[0] * strides[0] +
1641 ... +
1642 indices[ndim-1] * strides[ndim-1]
1643
1644 Adjust buf:
1645 -----------
1646 Adding start[n] for each dimension effectively adds the constant:
1647
1648 c = start[0] * strides[0] + ... + start[ndim-1] * strides[ndim-1]
1649
1650 Therefore init_slice() adds all start[n] directly to buf.
1651
1652 Adjust shape:
1653 -------------
1654 Obviously shape[n] = slicelength[n]
1655
1656 Adjust strides:
1657 ---------------
1658 In the original array, the next element in a dimension is reached
1659 by adding strides[n] to the pointer. In the sliced array, elements
1660 may be skipped, so the next element is reached by adding:
1661
1662 strides[n] * step[n]
1663
1664 Slicing PIL arrays
1665 ==================
1666
1667 Layout:
1668 -------
1669 In the first (zeroth) dimension, PIL arrays have an array of pointers
1670 to sub-arrays of ndim-1. Striding in the first dimension is done by
1671    getting the nth pointer, dereferencing it and then adding a
1672    suboffset to it. The arrays pointed to can best be seen as regular
1673    NumPy arrays.
1674
1675 Adjust buf:
1676 -----------
1677 In the original array, buf points to a location (usually the start)
1678 in the array of pointers. For the sliced array, start[0] can be
1679 added to buf in the same manner as for NumPy arrays.
1680
1681 Adjust suboffsets:
1682 ------------------
1683 Due to the dereferencing step in the addressing scheme, it is not
1684 possible to adjust buf for higher dimensions. Recall that the
1685 sub-arrays pointed to are regular NumPy arrays, so for each of
1686 those arrays adding start[n] effectively adds the constant:
1687
1688 c = start[1] * strides[1] + ... + start[ndim-1] * strides[ndim-1]
1689
1690 This constant is added to suboffsets[0]. suboffsets[0] in turn is
1691 added to each pointer right after dereferencing.
1692
1693 Adjust shape and strides:
1694 -------------------------
1695 Shape and strides are not influenced by the dereferencing step, so
1696 they are adjusted in the same manner as for NumPy arrays.
1697
1698 Multiple levels of suboffsets
1699 =============================
1700
1701 For a construct like an array of pointers to array of pointers to
1702 sub-arrays of ndim-2:
1703
1704 suboffsets[0] = start[1] * strides[1]
1705 suboffsets[1] = start[2] * strides[2] + ...
1706 */
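/* Editorial example (assumed values): slicing a one-dimensional NumPy-style
   base with shape = {10}, strides = {1}, itemsize = 1 by slice(2, 9, 3)
   yields start = 2, step = 3, slicelength = 3; init_slice() then advances
   buf by 2, sets shape[0] = 3 and strides[0] = 3, so the original indices
   2, 5 and 8 are visited. */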
1707 static int
1708 init_slice(Py_buffer *base, PyObject *key, int dim)
1709 {
1710 Py_ssize_t start, stop, step, slicelength;
1711
1712 if (PySlice_Unpack(key, &start, &stop, &step) < 0) {
1713 return -1;
1714 }
1715 slicelength = PySlice_AdjustIndices(base->shape[dim], &start, &stop, step);
1716
1717
1718 if (base->suboffsets == NULL || dim == 0) {
1719 adjust_buf:
1720 base->buf = (char *)base->buf + base->strides[dim] * start;
1721 }
1722 else {
1723 Py_ssize_t n = dim-1;
1724 while (n >= 0 && base->suboffsets[n] < 0)
1725 n--;
1726 if (n < 0)
1727 goto adjust_buf; /* all suboffsets are negative */
1728 base->suboffsets[n] = base->suboffsets[n] + base->strides[dim] * start;
1729 }
1730 base->shape[dim] = slicelength;
1731 base->strides[dim] = base->strides[dim] * step;
1732
1733 return 0;
1734 }
1735
1736 static int
1737 copy_structure(Py_buffer *base)
1738 {
1739 Py_ssize_t *shape = NULL, *strides = NULL, *suboffsets = NULL;
1740 Py_ssize_t i;
1741
1742 shape = PyMem_Malloc(base->ndim * (sizeof *shape));
1743 strides = PyMem_Malloc(base->ndim * (sizeof *strides));
1744 if (shape == NULL || strides == NULL)
1745 goto err_nomem;
1746
1747 suboffsets = NULL;
1748 if (base->suboffsets) {
1749 suboffsets = PyMem_Malloc(base->ndim * (sizeof *suboffsets));
1750 if (suboffsets == NULL)
1751 goto err_nomem;
1752 }
1753
1754 for (i = 0; i < base->ndim; i++) {
1755 shape[i] = base->shape[i];
1756 strides[i] = base->strides[i];
1757 if (suboffsets)
1758 suboffsets[i] = base->suboffsets[i];
1759 }
1760
1761 base->shape = shape;
1762 base->strides = strides;
1763 base->suboffsets = suboffsets;
1764
1765 return 0;
1766
1767 err_nomem:
1768 PyErr_NoMemory();
1769 PyMem_XFree(shape);
1770 PyMem_XFree(strides);
1771 PyMem_XFree(suboffsets);
1772 return -1;
1773 }
1774
1775 static PyObject *
1776 ndarray_subscript(NDArrayObject *self, PyObject *key)
1777 {
1778 NDArrayObject *nd;
1779 ndbuf_t *ndbuf;
1780 Py_buffer *base = &self->head->base;
1781
1782 if (base->ndim == 0) {
1783 if (PyTuple_Check(key) && PyTuple_GET_SIZE(key) == 0) {
1784 return unpack_single(base->buf, base->format, base->itemsize);
1785 }
1786 else if (key == Py_Ellipsis) {
1787 return Py_NewRef(self);
1788 }
1789 else {
1790 PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
1791 return NULL;
1792 }
1793 }
1794 if (PyIndex_Check(key)) {
1795 Py_ssize_t index = PyLong_AsSsize_t(key);
1796 if (index == -1 && PyErr_Occurred())
1797 return NULL;
1798 return ndarray_item(self, index);
1799 }
1800
1801 nd = (NDArrayObject *)ndarray_new(&NDArray_Type, NULL, NULL);
1802 if (nd == NULL)
1803 return NULL;
1804
1805 /* new ndarray is a consumer */
1806 if (ndarray_init_staticbuf((PyObject *)self, nd, PyBUF_FULL_RO) < 0) {
1807 Py_DECREF(nd);
1808 return NULL;
1809 }
1810
1811 /* copy shape, strides and suboffsets */
1812 ndbuf = nd->head;
1813 base = &ndbuf->base;
1814 if (copy_structure(base) < 0) {
1815 Py_DECREF(nd);
1816 return NULL;
1817 }
1818 ndbuf->flags |= ND_OWN_ARRAYS;
1819
1820 if (PySlice_Check(key)) {
1821 /* one-dimensional slice */
1822 if (init_slice(base, key, 0) < 0)
1823 goto err_occurred;
1824 }
1825 else if (PyTuple_Check(key)) {
1826 /* multi-dimensional slice */
1827 PyObject *tuple = key;
1828 Py_ssize_t i, n;
1829
1830 n = PyTuple_GET_SIZE(tuple);
1831 for (i = 0; i < n; i++) {
1832 key = PyTuple_GET_ITEM(tuple, i);
1833 if (!PySlice_Check(key))
1834 goto type_error;
1835 if (init_slice(base, key, (int)i) < 0)
1836 goto err_occurred;
1837 }
1838 }
1839 else {
1840 goto type_error;
1841 }
1842
1843 init_len(base);
1844 init_flags(ndbuf);
1845
1846 return (PyObject *)nd;
1847
1848
1849 type_error:
1850 PyErr_Format(PyExc_TypeError,
1851 "cannot index memory using \"%.200s\"",
1852 Py_TYPE(key)->tp_name);
1853 err_occurred:
1854 Py_DECREF(nd);
1855 return NULL;
1856 }
1857
1858
1859 static int
1860 ndarray_ass_subscript(NDArrayObject *self, PyObject *key, PyObject *value)
1861 {
1862 NDArrayObject *nd;
1863 Py_buffer *dest = &self->head->base;
1864 Py_buffer src;
1865 char *ptr;
1866 Py_ssize_t index;
1867 int ret = -1;
1868
1869 if (dest->readonly) {
1870 PyErr_SetString(PyExc_TypeError, "ndarray is not writable");
1871 return -1;
1872 }
1873 if (value == NULL) {
1874 PyErr_SetString(PyExc_TypeError, "ndarray data cannot be deleted");
1875 return -1;
1876 }
1877 if (dest->ndim == 0) {
1878 if (key == Py_Ellipsis ||
1879 (PyTuple_Check(key) && PyTuple_GET_SIZE(key) == 0)) {
1880 ptr = (char *)dest->buf;
1881 return pack_single(ptr, value, dest->format, dest->itemsize);
1882 }
1883 else {
1884 PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
1885 return -1;
1886 }
1887 }
1888 if (dest->ndim == 1 && PyIndex_Check(key)) {
1889 /* rvalue must be a single item */
1890 index = PyLong_AsSsize_t(key);
1891 if (index == -1 && PyErr_Occurred())
1892 return -1;
1893 else {
1894 ptr = ptr_from_index(dest, index);
1895 if (ptr == NULL)
1896 return -1;
1897 }
1898 return pack_single(ptr, value, dest->format, dest->itemsize);
1899 }
1900
1901 /* rvalue must be an exporter */
1902 if (PyObject_GetBuffer(value, &src, PyBUF_FULL_RO) == -1)
1903 return -1;
1904
1905 nd = (NDArrayObject *)ndarray_subscript(self, key);
1906 if (nd != NULL) {
1907 dest = &nd->head->base;
1908 ret = copy_buffer(dest, &src);
1909 Py_DECREF(nd);
1910 }
1911
1912 PyBuffer_Release(&src);
1913 return ret;
1914 }
1915
1916 static PyObject *
1917 slice_indices(PyObject *self, PyObject *args)
1918 {
1919 PyObject *ret, *key, *tmp;
1920 Py_ssize_t s[4]; /* start, stop, step, slicelength */
1921 Py_ssize_t i, len;
1922
1923 if (!PyArg_ParseTuple(args, "On", &key, &len)) {
1924 return NULL;
1925 }
1926 if (!PySlice_Check(key)) {
1927 PyErr_SetString(PyExc_TypeError,
1928 "first argument must be a slice object");
1929 return NULL;
1930 }
1931 if (PySlice_Unpack(key, &s[0], &s[1], &s[2]) < 0) {
1932 return NULL;
1933 }
1934 s[3] = PySlice_AdjustIndices(len, &s[0], &s[1], s[2]);
1935
1936 ret = PyTuple_New(4);
1937 if (ret == NULL)
1938 return NULL;
1939
1940 for (i = 0; i < 4; i++) {
1941 tmp = PyLong_FromSsize_t(s[i]);
1942 if (tmp == NULL)
1943 goto error;
1944 PyTuple_SET_ITEM(ret, i, tmp);
1945 }
1946
1947 return ret;
1948
1949 error:
1950 Py_DECREF(ret);
1951 return NULL;
1952 }
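/* Example of the adjustment performed above (plain arithmetic):

       slice_indices(slice(0, 10, 2), 5)   ->   (0, 5, 2, 3)

   i.e. stop is clamped to the sequence length 5 and the resulting slice
   has 3 elements. */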
1953
1954
1955 static PyMappingMethods ndarray_as_mapping = {
1956 NULL, /* mp_length */
1957 (binaryfunc)ndarray_subscript, /* mp_subscript */
1958 (objobjargproc)ndarray_ass_subscript /* mp_ass_subscript */
1959 };
1960
1961 static PySequenceMethods ndarray_as_sequence = {
1962 0, /* sq_length */
1963 0, /* sq_concat */
1964 0, /* sq_repeat */
1965 (ssizeargfunc)ndarray_item, /* sq_item */
1966 };
1967
1968
1969 /**************************************************************************/
1970 /* getters */
1971 /**************************************************************************/
1972
1973 static PyObject *
1974 ssize_array_as_tuple(Py_ssize_t *array, Py_ssize_t len)
1975 {
1976 PyObject *tuple, *x;
1977 Py_ssize_t i;
1978
1979 if (array == NULL)
1980 return PyTuple_New(0);
1981
1982 tuple = PyTuple_New(len);
1983 if (tuple == NULL)
1984 return NULL;
1985
1986 for (i = 0; i < len; i++) {
1987 x = PyLong_FromSsize_t(array[i]);
1988 if (x == NULL) {
1989 Py_DECREF(tuple);
1990 return NULL;
1991 }
1992 PyTuple_SET_ITEM(tuple, i, x);
1993 }
1994
1995 return tuple;
1996 }
1997
1998 static PyObject *
1999 ndarray_get_flags(NDArrayObject *self, void *closure)
2000 {
2001 return PyLong_FromLong(self->head->flags);
2002 }
2003
2004 static PyObject *
2005 ndarray_get_offset(NDArrayObject *self, void *closure)
2006 {
2007 ndbuf_t *ndbuf = self->head;
2008 return PyLong_FromSsize_t(ndbuf->offset);
2009 }
2010
2011 static PyObject *
2012 ndarray_get_obj(NDArrayObject *self, void *closure)
2013 {
2014 Py_buffer *base = &self->head->base;
2015
2016 if (base->obj == NULL) {
2017 Py_RETURN_NONE;
2018 }
2019 return Py_NewRef(base->obj);
2020 }
2021
2022 static PyObject *
2023 ndarray_get_nbytes(NDArrayObject *self, void *closure)
2024 {
2025 Py_buffer *base = &self->head->base;
2026 return PyLong_FromSsize_t(base->len);
2027 }
2028
2029 static PyObject *
2030 ndarray_get_readonly(NDArrayObject *self, void *closure)
2031 {
2032 Py_buffer *base = &self->head->base;
2033 return PyBool_FromLong(base->readonly);
2034 }
2035
2036 static PyObject *
2037 ndarray_get_itemsize(NDArrayObject *self, void *closure)
2038 {
2039 Py_buffer *base = &self->head->base;
2040 return PyLong_FromSsize_t(base->itemsize);
2041 }
2042
2043 static PyObject *
2044 ndarray_get_format(NDArrayObject *self, void *closure)
2045 {
2046 Py_buffer *base = &self->head->base;
2047 const char *fmt = base->format ? base->format : "";
2048 return PyUnicode_FromString(fmt);
2049 }
2050
2051 static PyObject *
2052 ndarray_get_ndim(NDArrayObject *self, void *closure)
2053 {
2054 Py_buffer *base = &self->head->base;
2055 return PyLong_FromSsize_t(base->ndim);
2056 }
2057
2058 static PyObject *
2059 ndarray_get_shape(NDArrayObject *self, void *closure)
2060 {
2061 Py_buffer *base = &self->head->base;
2062 return ssize_array_as_tuple(base->shape, base->ndim);
2063 }
2064
2065 static PyObject *
2066 ndarray_get_strides(NDArrayObject *self, void *closure)
2067 {
2068 Py_buffer *base = &self->head->base;
2069 return ssize_array_as_tuple(base->strides, base->ndim);
2070 }
2071
2072 static PyObject *
2073 ndarray_get_suboffsets(NDArrayObject *self, void *closure)
2074 {
2075 Py_buffer *base = &self->head->base;
2076 return ssize_array_as_tuple(base->suboffsets, base->ndim);
2077 }
2078
2079 static PyObject *
2080 ndarray_c_contig(PyObject *self, PyObject *dummy)
2081 {
2082 NDArrayObject *nd = (NDArrayObject *)self;
2083 int ret = PyBuffer_IsContiguous(&nd->head->base, 'C');
2084
2085 if (ret != ND_C_CONTIGUOUS(nd->head->flags)) {
2086 PyErr_SetString(PyExc_RuntimeError,
2087 "results from PyBuffer_IsContiguous() and flags differ");
2088 return NULL;
2089 }
2090 return PyBool_FromLong(ret);
2091 }
2092
2093 static PyObject *
2094 ndarray_fortran_contig(PyObject *self, PyObject *dummy)
2095 {
2096 NDArrayObject *nd = (NDArrayObject *)self;
2097 int ret = PyBuffer_IsContiguous(&nd->head->base, 'F');
2098
2099 if (ret != ND_FORTRAN_CONTIGUOUS(nd->head->flags)) {
2100 PyErr_SetString(PyExc_RuntimeError,
2101 "results from PyBuffer_IsContiguous() and flags differ");
2102 return NULL;
2103 }
2104 return PyBool_FromLong(ret);
2105 }
2106
2107 static PyObject *
2108 ndarray_contig(PyObject *self, PyObject *dummy)
2109 {
2110 NDArrayObject *nd = (NDArrayObject *)self;
2111 int ret = PyBuffer_IsContiguous(&nd->head->base, 'A');
2112
2113 if (ret != ND_ANY_CONTIGUOUS(nd->head->flags)) {
2114 PyErr_SetString(PyExc_RuntimeError,
2115 "results from PyBuffer_IsContiguous() and flags differ");
2116 return NULL;
2117 }
2118 return PyBool_FromLong(ret);
2119 }
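/* The three getters above cross-check PyBuffer_IsContiguous() against the
   internal ndbuf flags.  A minimal C sketch of the same query, assuming
   `view` is a filled-in Py_buffer:

       int c = PyBuffer_IsContiguous(&view, 'C');
       int f = PyBuffer_IsContiguous(&view, 'F');
       int a = PyBuffer_IsContiguous(&view, 'A');   // contiguous in either order
*/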
2120
2121
2122 static PyGetSetDef ndarray_getset [] =
2123 {
2124 /* ndbuf */
2125 { "flags", (getter)ndarray_get_flags, NULL, NULL, NULL},
2126 { "offset", (getter)ndarray_get_offset, NULL, NULL, NULL},
2127 /* ndbuf.base */
2128 { "obj", (getter)ndarray_get_obj, NULL, NULL, NULL},
2129 { "nbytes", (getter)ndarray_get_nbytes, NULL, NULL, NULL},
2130 { "readonly", (getter)ndarray_get_readonly, NULL, NULL, NULL},
2131 { "itemsize", (getter)ndarray_get_itemsize, NULL, NULL, NULL},
2132 { "format", (getter)ndarray_get_format, NULL, NULL, NULL},
2133 { "ndim", (getter)ndarray_get_ndim, NULL, NULL, NULL},
2134 { "shape", (getter)ndarray_get_shape, NULL, NULL, NULL},
2135 { "strides", (getter)ndarray_get_strides, NULL, NULL, NULL},
2136 { "suboffsets", (getter)ndarray_get_suboffsets, NULL, NULL, NULL},
2137 { "c_contiguous", (getter)ndarray_c_contig, NULL, NULL, NULL},
2138 { "f_contiguous", (getter)ndarray_fortran_contig, NULL, NULL, NULL},
2139 { "contiguous", (getter)ndarray_contig, NULL, NULL, NULL},
2140 {NULL}
2141 };
2142
2143 static PyObject *
2144 ndarray_tolist(PyObject *self, PyObject *dummy)
2145 {
2146 return ndarray_as_list((NDArrayObject *)self);
2147 }
2148
2149 static PyObject *
2150 ndarray_tobytes(PyObject *self, PyObject *dummy)
2151 {
2152 ndbuf_t *ndbuf = ((NDArrayObject *)self)->head;
2153 Py_buffer *src = &ndbuf->base;
2154 Py_buffer dest;
2155 PyObject *ret = NULL;
2156 char *mem;
2157
2158 if (ND_C_CONTIGUOUS(ndbuf->flags))
2159 return PyBytes_FromStringAndSize(src->buf, src->len);
2160
2161 assert(src->shape != NULL);
2162 assert(src->strides != NULL);
2163 assert(src->ndim > 0);
2164
2165 mem = PyMem_Malloc(src->len);
2166 if (mem == NULL) {
2167 PyErr_NoMemory();
2168 return NULL;
2169 }
2170
2171 dest = *src;
2172 dest.buf = mem;
2173 dest.suboffsets = NULL;
2174 dest.strides = strides_from_shape(ndbuf, 0);
2175 if (dest.strides == NULL)
2176 goto out;
2177 if (copy_buffer(&dest, src) < 0)
2178 goto out;
2179
2180 ret = PyBytes_FromStringAndSize(mem, src->len);
2181
2182 out:
2183 PyMem_XFree(dest.strides);
2184 PyMem_Free(mem);
2185 return ret;
2186 }
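/* Note: tobytes() always serializes in logical C order.  For a buffer that
   is not C-contiguous the result is copied element by element rather than
   memcpy'd, e.g. (hypothetical usage):

       fortran = ndarray([1, 2, 3, 4], shape=[2, 2], flags=ND_FORTRAN)
       fortran.tobytes()     # C-order serialization, not the raw memory order
*/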
2187
2188 /* add redundant (negative) suboffsets for testing */
2189 static PyObject *
2190 ndarray_add_suboffsets(PyObject *self, PyObject *dummy)
2191 {
2192 NDArrayObject *nd = (NDArrayObject *)self;
2193 Py_buffer *base = &nd->head->base;
2194 Py_ssize_t i;
2195
2196 if (base->suboffsets != NULL) {
2197 PyErr_SetString(PyExc_TypeError,
2198 "cannot add suboffsets to PIL-style array");
2199 return NULL;
2200 }
2201 if (base->strides == NULL) {
2202 PyErr_SetString(PyExc_TypeError,
2203 "cannot add suboffsets to array without strides");
2204 return NULL;
2205 }
2206
2207 base->suboffsets = PyMem_Malloc(base->ndim * (sizeof *base->suboffsets));
2208 if (base->suboffsets == NULL) {
2209 PyErr_NoMemory();
2210 return NULL;
2211 }
2212
2213 for (i = 0; i < base->ndim; i++)
2214 base->suboffsets[i] = -1;
2215
2216 nd->head->flags &= ~(ND_C|ND_FORTRAN);
2217
2218 Py_RETURN_NONE;
2219 }
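/* Per PEP 3118 a negative suboffset means "no dereference" in that
   dimension, so the memory layout is unchanged; the contiguity flags are
   cleared above only to stay consistent with PyBuffer_IsContiguous(),
   which treats any suboffsets array as non-contiguous.  Rough usage:

       nd = ndarray(list(range(6)), shape=[2, 3])
       nd.add_suboffsets()
       nd.suboffsets         # (-1, -1)
*/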
2220
2221 /* Test PyMemoryView_FromBuffer(): return a memoryview from a static buffer.
2222 Obviously this is fragile and only one such view may be active at any
2223 time. Never use anything like this in real code! */
2224 static char *infobuf = NULL;
2225 static PyObject *
2226 ndarray_memoryview_from_buffer(PyObject *self, PyObject *dummy)
2227 {
2228 const NDArrayObject *nd = (NDArrayObject *)self;
2229 const Py_buffer *view = &nd->head->base;
2230 const ndbuf_t *ndbuf;
2231 static char format[ND_MAX_NDIM+1];
2232 static Py_ssize_t shape[ND_MAX_NDIM];
2233 static Py_ssize_t strides[ND_MAX_NDIM];
2234 static Py_ssize_t suboffsets[ND_MAX_NDIM];
2235 static Py_buffer info;
2236 char *p;
2237
2238 if (!ND_IS_CONSUMER(nd))
2239 ndbuf = nd->head; /* self is ndarray/original exporter */
2240 else if (NDArray_Check(view->obj) && !ND_IS_CONSUMER(view->obj))
2241 /* self is ndarray and consumer from ndarray/original exporter */
2242 ndbuf = ((NDArrayObject *)view->obj)->head;
2243 else {
2244 PyErr_SetString(PyExc_TypeError,
2245 "memoryview_from_buffer(): ndarray must be original exporter or "
2246 "consumer from ndarray/original exporter");
2247 return NULL;
2248 }
2249
2250 info = *view;
2251 p = PyMem_Realloc(infobuf, ndbuf->len);
2252 if (p == NULL) {
2253 PyMem_Free(infobuf);
2254 PyErr_NoMemory();
2255 infobuf = NULL;
2256 return NULL;
2257 }
2258 else {
2259 infobuf = p;
2260 }
2261 /* copy the complete raw data */
2262 memcpy(infobuf, ndbuf->data, ndbuf->len);
2263 info.buf = infobuf + ((char *)view->buf - ndbuf->data);
2264
2265 if (view->format) {
2266 if (strlen(view->format) > ND_MAX_NDIM) {
2267 PyErr_Format(PyExc_TypeError,
2268 "memoryview_from_buffer: format is limited to %d characters",
2269 ND_MAX_NDIM);
2270 return NULL;
2271 }
2272 strcpy(format, view->format);
2273 info.format = format;
2274 }
2275 if (view->ndim > ND_MAX_NDIM) {
2276 PyErr_Format(PyExc_TypeError,
2277 "memoryview_from_buffer: ndim is limited to %d", ND_MAX_NDIM);
2278 return NULL;
2279 }
2280 if (view->shape) {
2281 memcpy(shape, view->shape, view->ndim * sizeof(Py_ssize_t));
2282 info.shape = shape;
2283 }
2284 if (view->strides) {
2285 memcpy(strides, view->strides, view->ndim * sizeof(Py_ssize_t));
2286 info.strides = strides;
2287 }
2288 if (view->suboffsets) {
2289 memcpy(suboffsets, view->suboffsets, view->ndim * sizeof(Py_ssize_t));
2290 info.suboffsets = suboffsets;
2291 }
2292
2293 return PyMemoryView_FromBuffer(&info);
2294 }
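/* For reference, a conventional (safe) use of PyMemoryView_FromBuffer()
   fills a Py_buffer for memory that outlives the view; a sketch, where
   `data` and `size` are placeholders:

       Py_buffer info;
       if (PyBuffer_FillInfo(&info, NULL, data, size, 1, PyBUF_FULL_RO) < 0)
           return NULL;
       return PyMemoryView_FromBuffer(&info);

   The static arrays above exist only so the test can fabricate views with
   arbitrary format/shape/strides/suboffsets. */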
2295
2296 /* Get a single item from bufobj at the location specified by seq.
2297 seq is a list or tuple of indices. The purpose of this function
2298 is to check other functions against PyBuffer_GetPointer(). */
2299 static PyObject *
2300 get_pointer(PyObject *self, PyObject *args)
2301 {
2302 PyObject *ret = NULL, *bufobj, *seq;
2303 Py_buffer view;
2304 Py_ssize_t indices[ND_MAX_NDIM];
2305 Py_ssize_t i;
2306 void *ptr;
2307
2308 if (!PyArg_ParseTuple(args, "OO", &bufobj, &seq)) {
2309 return NULL;
2310 }
2311
2312 CHECK_LIST_OR_TUPLE(seq);
2313 if (PyObject_GetBuffer(bufobj, &view, PyBUF_FULL_RO) < 0)
2314 return NULL;
2315
2316 if (view.ndim > ND_MAX_NDIM) {
2317 PyErr_Format(PyExc_ValueError,
2318 "get_pointer(): ndim > %d", ND_MAX_NDIM);
2319 goto out;
2320 }
2321 if (PySequence_Fast_GET_SIZE(seq) != view.ndim) {
2322 PyErr_SetString(PyExc_ValueError,
2323 "get_pointer(): len(indices) != ndim");
2324 goto out;
2325 }
2326
2327 for (i = 0; i < view.ndim; i++) {
2328 PyObject *x = PySequence_Fast_GET_ITEM(seq, i);
2329 indices[i] = PyLong_AsSsize_t(x);
2330 if (PyErr_Occurred())
2331 goto out;
2332 if (indices[i] < 0 || indices[i] >= view.shape[i]) {
2333 PyErr_Format(PyExc_ValueError,
2334 "get_pointer(): invalid index %zd at position %zd",
2335 indices[i], i);
2336 goto out;
2337 }
2338 }
2339
2340 ptr = PyBuffer_GetPointer(&view, indices);
2341 ret = unpack_single(ptr, view.format, view.itemsize);
2342
2343 out:
2344 PyBuffer_Release(&view);
2345 return ret;
2346 }
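/* Typical use in the tests (hypothetical values): for an exporter with
   shape [3, 4],

       get_pointer(nd, (1, 2))

   is expected to return the same value as nd.tolist()[1][2], since both
   walk the buffer via its strides/suboffsets. */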
2347
2348 static PyObject *
2349 get_sizeof_void_p(PyObject *self, PyObject *Py_UNUSED(ignored))
2350 {
2351 return PyLong_FromSize_t(sizeof(void *));
2352 }
2353
2354 static char
2355 get_ascii_order(PyObject *order)
2356 {
2357 PyObject *ascii_order;
2358 char ord;
2359
2360 if (!PyUnicode_Check(order)) {
2361 PyErr_SetString(PyExc_TypeError,
2362 "order must be a string");
2363 return CHAR_MAX;
2364 }
2365
2366 ascii_order = PyUnicode_AsASCIIString(order);
2367 if (ascii_order == NULL) {
2368 return CHAR_MAX;
2369 }
2370
2371 ord = PyBytes_AS_STRING(ascii_order)[0];
2372 Py_DECREF(ascii_order);
2373
2374 if (ord != 'C' && ord != 'F' && ord != 'A') {
2375 PyErr_SetString(PyExc_ValueError,
2376 "invalid order, must be C, F or A");
2377 return CHAR_MAX;
2378 }
2379
2380 return ord;
2381 }
2382
2383 /* Get a contiguous memoryview. */
2384 static PyObject *
2385 get_contiguous(PyObject *self, PyObject *args)
2386 {
2387 PyObject *obj;
2388 PyObject *buffertype;
2389 PyObject *order;
2390 long type;
2391 char ord;
2392
2393 if (!PyArg_ParseTuple(args, "OOO", &obj, &buffertype, &order)) {
2394 return NULL;
2395 }
2396
2397 if (!PyLong_Check(buffertype)) {
2398 PyErr_SetString(PyExc_TypeError,
2399 "buffertype must be PyBUF_READ or PyBUF_WRITE");
2400 return NULL;
2401 }
2402
2403 type = PyLong_AsLong(buffertype);
2404 if (type == -1 && PyErr_Occurred()) {
2405 return NULL;
2406 }
2407 if (type != PyBUF_READ && type != PyBUF_WRITE) {
2408 PyErr_SetString(PyExc_ValueError,
2409 "invalid buffer type");
2410 return NULL;
2411 }
2412
2413 ord = get_ascii_order(order);
2414 if (ord == CHAR_MAX)
2415 return NULL;
2416
2417 return PyMemoryView_GetContiguous(obj, (int)type, ord);
2418 }
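/* Thin wrapper around PyMemoryView_GetContiguous().  Rough usage:

       m = get_contiguous(exporter, PyBUF_READ, 'C')

   which yields a C-contiguous memoryview, copying the data if the
   exporter is not already contiguous in the requested order. */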
2419
2420 /* PyBuffer_ToContiguous() */
2421 static PyObject *
2422 py_buffer_to_contiguous(PyObject *self, PyObject *args)
2423 {
2424 PyObject *obj;
2425 PyObject *order;
2426 PyObject *ret = NULL;
2427 int flags;
2428 char ord;
2429 Py_buffer view;
2430 char *buf = NULL;
2431
2432 if (!PyArg_ParseTuple(args, "OOi", &obj, &order, &flags)) {
2433 return NULL;
2434 }
2435
2436 if (PyObject_GetBuffer(obj, &view, flags) < 0) {
2437 return NULL;
2438 }
2439
2440 ord = get_ascii_order(order);
2441 if (ord == CHAR_MAX) {
2442 goto out;
2443 }
2444
2445 buf = PyMem_Malloc(view.len);
2446 if (buf == NULL) {
2447 PyErr_NoMemory();
2448 goto out;
2449 }
2450
2451 if (PyBuffer_ToContiguous(buf, &view, view.len, ord) < 0) {
2452 goto out;
2453 }
2454
2455 ret = PyBytes_FromStringAndSize(buf, view.len);
2456
2457 out:
2458 PyBuffer_Release(&view);
2459 PyMem_XFree(buf);
2460 return ret;
2461 }
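/* PyBuffer_ToContiguous() linearizes a buffer into caller-provided memory
   in the requested order.  Rough usage of this wrapper:

       py_buffer_to_contiguous(exporter, 'F', PyBUF_FULL_RO)

   returns the contents as a bytes object in Fortran order. */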
2462
2463 static int
2464 fmtcmp(const char *fmt1, const char *fmt2)
2465 {
2466 if (fmt1 == NULL) {
2467 return fmt2 == NULL || strcmp(fmt2, "B") == 0;
2468 }
2469 if (fmt2 == NULL) {
2470 return fmt1 == NULL || strcmp(fmt1, "B") == 0;
2471 }
2472 return strcmp(fmt1, fmt2) == 0;
2473 }
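/* A NULL format is the PEP 3118 shorthand for "B" (unsigned bytes), hence:
   fmtcmp(NULL, "B") == 1, fmtcmp("B", "B") == 1, fmtcmp(NULL, "<i") == 0. */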
2474
2475 static int
2476 arraycmp(const Py_ssize_t *a1, const Py_ssize_t *a2, const Py_ssize_t *shape,
2477 Py_ssize_t ndim)
2478 {
2479 Py_ssize_t i;
2480
2481
2482 for (i = 0; i < ndim; i++) {
2483 if (shape && shape[i] <= 1) {
2484 /* strides can differ if the dimension is less than 2 */
2485 continue;
2486 }
2487 if (a1[i] != a2[i]) {
2488 return 0;
2489 }
2490 }
2491
2492 return 1;
2493 }
2494
2495 /* Compare two contiguous buffers for physical equality. */
2496 static PyObject *
2497 cmp_contig(PyObject *self, PyObject *args)
2498 {
2499 PyObject *b1, *b2; /* buffer objects */
2500 Py_buffer v1, v2;
2501 PyObject *ret;
2502 int equal = 0;
2503
2504 if (!PyArg_ParseTuple(args, "OO", &b1, &b2)) {
2505 return NULL;
2506 }
2507
2508 if (PyObject_GetBuffer(b1, &v1, PyBUF_FULL_RO) < 0) {
2509 PyErr_SetString(PyExc_TypeError,
2510 "cmp_contig: first argument does not implement the buffer "
2511 "protocol");
2512 return NULL;
2513 }
2514 if (PyObject_GetBuffer(b2, &v2, PyBUF_FULL_RO) < 0) {
2515 PyErr_SetString(PyExc_TypeError,
2516 "cmp_contig: second argument does not implement the buffer "
2517 "protocol");
2518 PyBuffer_Release(&v1);
2519 return NULL;
2520 }
2521
2522 if (!(PyBuffer_IsContiguous(&v1, 'C')&&PyBuffer_IsContiguous(&v2, 'C')) &&
2523 !(PyBuffer_IsContiguous(&v1, 'F')&&PyBuffer_IsContiguous(&v2, 'F'))) {
2524 goto result;
2525 }
2526
2527 /* readonly may differ if created from non-contiguous */
2528 if (v1.len != v2.len ||
2529 v1.itemsize != v2.itemsize ||
2530 v1.ndim != v2.ndim ||
2531 !fmtcmp(v1.format, v2.format) ||
2532 !!v1.shape != !!v2.shape ||
2533 !!v1.strides != !!v2.strides ||
2534 !!v1.suboffsets != !!v2.suboffsets) {
2535 goto result;
2536 }
2537
2538 if ((v1.shape && !arraycmp(v1.shape, v2.shape, NULL, v1.ndim)) ||
2539 (v1.strides && !arraycmp(v1.strides, v2.strides, v1.shape, v1.ndim)) ||
2540 (v1.suboffsets && !arraycmp(v1.suboffsets, v2.suboffsets, NULL,
2541 v1.ndim))) {
2542 goto result;
2543 }
2544
2545 if (memcmp((char *)v1.buf, (char *)v2.buf, v1.len) != 0) {
2546 goto result;
2547 }
2548
2549 equal = 1;
2550
2551 result:
2552 PyBuffer_Release(&v1);
2553 PyBuffer_Release(&v2);
2554
2555 ret = equal ? Py_True : Py_False;
2556 return Py_NewRef(ret);
2557 }
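/* Rough usage: two exporters compare equal only if they share contiguity
   (both C or both F), structure and contents, e.g.

       cmp_contig(b"abc", bytearray(b"abc"))    # expected to be True

   readonly is deliberately ignored, as noted above. */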
2558
2559 static PyObject *
2560 is_contiguous(PyObject *self, PyObject *args)
2561 {
2562 PyObject *obj;
2563 PyObject *order;
2564 PyObject *ret = NULL;
2565 Py_buffer view, *base;
2566 char ord;
2567
2568 if (!PyArg_ParseTuple(args, "OO", &obj, &order)) {
2569 return NULL;
2570 }
2571
2572 ord = get_ascii_order(order);
2573 if (ord == CHAR_MAX) {
2574 return NULL;
2575 }
2576
2577 if (NDArray_Check(obj)) {
2578 /* Skip the buffer protocol to check simple etc. buffers directly. */
2579 base = &((NDArrayObject *)obj)->head->base;
2580 ret = PyBuffer_IsContiguous(base, ord) ? Py_True : Py_False;
2581 }
2582 else {
2583 if (PyObject_GetBuffer(obj, &view, PyBUF_FULL_RO) < 0) {
2584 PyErr_SetString(PyExc_TypeError,
2585 "is_contiguous: object does not implement the buffer "
2586 "protocol");
2587 return NULL;
2588 }
2589 ret = PyBuffer_IsContiguous(&view, ord) ? Py_True : Py_False;
2590 PyBuffer_Release(&view);
2591 }
2592
2593 return Py_NewRef(ret);
2594 }
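/* Rough usage:

       is_contiguous(b"xyz", 'C')     # bytes objects are C-contiguous
       is_contiguous(nd, 'F')         # ndarray: inspects the base buffer directly
*/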
2595
2596 static Py_hash_t
2597 ndarray_hash(PyObject *self)
2598 {
2599 const NDArrayObject *nd = (NDArrayObject *)self;
2600 const Py_buffer *view = &nd->head->base;
2601 PyObject *bytes;
2602 Py_hash_t hash;
2603
2604 if (!view->readonly) {
2605 PyErr_SetString(PyExc_ValueError,
2606 "cannot hash writable ndarray object");
2607 return -1;
2608 }
2609 if (view->obj != NULL && PyObject_Hash(view->obj) == -1) {
2610 return -1;
2611 }
2612
2613 bytes = ndarray_tobytes(self, NULL);
2614 if (bytes == NULL) {
2615 return -1;
2616 }
2617
2618 hash = PyObject_Hash(bytes);
2619 Py_DECREF(bytes);
2620 return hash;
2621 }
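/* Hashing mirrors memoryview: the buffer must be read-only and, for a
   consumer, the underlying exporter must itself be hashable.  By
   construction the hash equals that of the serialized contents, roughly:

       nd = ndarray([1, 2, 3], shape=[3])       # read-only by default
       hash(nd) == hash(nd.tobytes())
*/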
2622
2623
2624 static PyMethodDef ndarray_methods [] =
2625 {
2626 { "tolist", ndarray_tolist, METH_NOARGS, NULL },
2627 { "tobytes", ndarray_tobytes, METH_NOARGS, NULL },
2628 { "push", _PyCFunction_CAST(ndarray_push), METH_VARARGS|METH_KEYWORDS, NULL },
2629 { "pop", ndarray_pop, METH_NOARGS, NULL },
2630 { "add_suboffsets", ndarray_add_suboffsets, METH_NOARGS, NULL },
2631 { "memoryview_from_buffer", ndarray_memoryview_from_buffer, METH_NOARGS, NULL },
2632 {NULL}
2633 };
2634
2635 static PyTypeObject NDArray_Type = {
2636 PyVarObject_HEAD_INIT(NULL, 0)
2637 "ndarray", /* Name of this type */
2638 sizeof(NDArrayObject), /* Basic object size */
2639 0, /* Item size for varobject */
2640 (destructor)ndarray_dealloc, /* tp_dealloc */
2641 0, /* tp_vectorcall_offset */
2642 0, /* tp_getattr */
2643 0, /* tp_setattr */
2644 0, /* tp_as_async */
2645 0, /* tp_repr */
2646 0, /* tp_as_number */
2647 &ndarray_as_sequence, /* tp_as_sequence */
2648 &ndarray_as_mapping, /* tp_as_mapping */
2649 (hashfunc)ndarray_hash, /* tp_hash */
2650 0, /* tp_call */
2651 0, /* tp_str */
2652 PyObject_GenericGetAttr, /* tp_getattro */
2653 0, /* tp_setattro */
2654 &ndarray_as_buffer, /* tp_as_buffer */
2655 Py_TPFLAGS_DEFAULT, /* tp_flags */
2656 0, /* tp_doc */
2657 0, /* tp_traverse */
2658 0, /* tp_clear */
2659 0, /* tp_richcompare */
2660 0, /* tp_weaklistoffset */
2661 0, /* tp_iter */
2662 0, /* tp_iternext */
2663 ndarray_methods, /* tp_methods */
2664 0, /* tp_members */
2665 ndarray_getset, /* tp_getset */
2666 0, /* tp_base */
2667 0, /* tp_dict */
2668 0, /* tp_descr_get */
2669 0, /* tp_descr_set */
2670 0, /* tp_dictoffset */
2671 ndarray_init, /* tp_init */
2672 0, /* tp_alloc */
2673 ndarray_new, /* tp_new */
2674 };
2675
2676 /**************************************************************************/
2677 /* StaticArray Object */
2678 /**************************************************************************/
2679
2680 static PyTypeObject StaticArray_Type;
2681
2682 typedef struct {
2683 PyObject_HEAD
2684 int legacy_mode; /* if true, use the view.obj==NULL hack */
2685 } StaticArrayObject;
2686
2687 static char static_mem[12] = {0,1,2,3,4,5,6,7,8,9,10,11};
2688 static Py_ssize_t static_shape[1] = {12};
2689 static Py_ssize_t static_strides[1] = {1};
2690 static Py_buffer static_buffer = {
2691 static_mem, /* buf */
2692 NULL, /* obj */
2693 12, /* len */
2694 1, /* itemsize */
2695 1, /* readonly */
2696 1, /* ndim */
2697 "B", /* format */
2698 static_shape, /* shape */
2699 static_strides, /* strides */
2700 NULL, /* suboffsets */
2701 NULL /* internal */
2702 };
2703
2704 static PyObject *
2705 staticarray_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
2706 {
2707 return (PyObject *)PyObject_New(StaticArrayObject, &StaticArray_Type);
2708 }
2709
2710 static int
2711 staticarray_init(PyObject *self, PyObject *args, PyObject *kwds)
2712 {
2713 StaticArrayObject *a = (StaticArrayObject *)self;
2714 static char *kwlist[] = {
2715 "legacy_mode", NULL
2716 };
2717 PyObject *legacy_mode = Py_False;
2718
2719 if (!PyArg_ParseTupleAndKeywords(args, kwds, "|O", kwlist, &legacy_mode))
2720 return -1;
2721
2722 a->legacy_mode = (legacy_mode != Py_False);
2723 return 0;
2724 }
2725
2726 static void
2727 staticarray_dealloc(StaticArrayObject *self)
2728 {
2729 PyObject_Free(self);
2730 }
2731
2732 /* Return a buffer for a PyBUF_FULL_RO request. Flags are not checked,
2733 which makes this object a non-compliant exporter! */
2734 static int
2735 staticarray_getbuf(StaticArrayObject *self, Py_buffer *view, int flags)
2736 {
2737 *view = static_buffer;
2738
2739 if (self->legacy_mode) {
2740 view->obj = NULL; /* Don't use this in new code. */
2741 }
2742 else {
2743 view->obj = Py_NewRef(self);
2744 }
2745
2746 return 0;
2747 }
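/* For contrast, a compliant getbuffer implementation always ties the view
   to its exporter before returning, roughly:

       view->obj = Py_NewRef(self);
       return 0;

   The legacy_mode branch above deliberately leaves view->obj NULL so that
   consumer-side handling of such broken exporters can be tested. */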
2748
2749 static PyBufferProcs staticarray_as_buffer = {
2750 (getbufferproc)staticarray_getbuf, /* bf_getbuffer */
2751 NULL, /* bf_releasebuffer */
2752 };
2753
2754 static PyTypeObject StaticArray_Type = {
2755 PyVarObject_HEAD_INIT(NULL, 0)
2756 "staticarray", /* Name of this type */
2757 sizeof(StaticArrayObject), /* Basic object size */
2758 0, /* Item size for varobject */
2759 (destructor)staticarray_dealloc, /* tp_dealloc */
2760 0, /* tp_vectorcall_offset */
2761 0, /* tp_getattr */
2762 0, /* tp_setattr */
2763 0, /* tp_as_async */
2764 0, /* tp_repr */
2765 0, /* tp_as_number */
2766 0, /* tp_as_sequence */
2767 0, /* tp_as_mapping */
2768 0, /* tp_hash */
2769 0, /* tp_call */
2770 0, /* tp_str */
2771 0, /* tp_getattro */
2772 0, /* tp_setattro */
2773 &staticarray_as_buffer, /* tp_as_buffer */
2774 Py_TPFLAGS_DEFAULT, /* tp_flags */
2775 0, /* tp_doc */
2776 0, /* tp_traverse */
2777 0, /* tp_clear */
2778 0, /* tp_richcompare */
2779 0, /* tp_weaklistoffset */
2780 0, /* tp_iter */
2781 0, /* tp_iternext */
2782 0, /* tp_methods */
2783 0, /* tp_members */
2784 0, /* tp_getset */
2785 0, /* tp_base */
2786 0, /* tp_dict */
2787 0, /* tp_descr_get */
2788 0, /* tp_descr_set */
2789 0, /* tp_dictoffset */
2790 staticarray_init, /* tp_init */
2791 0, /* tp_alloc */
2792 staticarray_new, /* tp_new */
2793 };
2794
2795
2796 static struct PyMethodDef _testbuffer_functions[] = {
2797 {"slice_indices", slice_indices, METH_VARARGS, NULL},
2798 {"get_pointer", get_pointer, METH_VARARGS, NULL},
2799 {"get_sizeof_void_p", get_sizeof_void_p, METH_NOARGS, NULL},
2800 {"get_contiguous", get_contiguous, METH_VARARGS, NULL},
2801 {"py_buffer_to_contiguous", py_buffer_to_contiguous, METH_VARARGS, NULL},
2802 {"is_contiguous", is_contiguous, METH_VARARGS, NULL},
2803 {"cmp_contig", cmp_contig, METH_VARARGS, NULL},
2804 {NULL, NULL}
2805 };
2806
2807 static struct PyModuleDef _testbuffermodule = {
2808 PyModuleDef_HEAD_INIT,
2809 "_testbuffer",
2810 NULL,
2811 -1,
2812 _testbuffer_functions,
2813 NULL,
2814 NULL,
2815 NULL,
2816 NULL
2817 };
2818
2819 static int
2820 _testbuffer_exec(PyObject *mod)
2821 {
2822 Py_SET_TYPE(&NDArray_Type, &PyType_Type);
2823 if (PyType_Ready(&NDArray_Type)) {
2824 return -1;
2825 }
2826 if (PyModule_AddType(mod, &NDArray_Type) < 0) {
2827 return -1;
2828 }
2829
2830 Py_SET_TYPE(&StaticArray_Type, &PyType_Type);
2831 if (PyModule_AddType(mod, &StaticArray_Type) < 0) {
2832 return -1;
2833 }
2834
2835 structmodule = PyImport_ImportModule("struct");
2836 if (structmodule == NULL) {
2837 return -1;
2838 }
2839
2840 Struct = PyObject_GetAttrString(structmodule, "Struct");
2841 if (Struct == NULL) {
2842 return -1;
2843 }
2844 calcsize = PyObject_GetAttrString(structmodule, "calcsize");
2845 if (calcsize == NULL) {
2846 return -1;
2847 }
2848
2849 simple_format = PyUnicode_FromString(simple_fmt);
2850 if (simple_format == NULL) {
2851 return -1;
2852 }
2853
2854 #define ADD_INT_MACRO(mod, macro) \
2855 do { \
2856 if (PyModule_AddIntConstant(mod, #macro, macro) < 0) { \
2857 return -1; \
2858 } \
2859 } while (0)
2860
2861 ADD_INT_MACRO(mod, ND_MAX_NDIM);
2862 ADD_INT_MACRO(mod, ND_VAREXPORT);
2863 ADD_INT_MACRO(mod, ND_WRITABLE);
2864 ADD_INT_MACRO(mod, ND_FORTRAN);
2865 ADD_INT_MACRO(mod, ND_SCALAR);
2866 ADD_INT_MACRO(mod, ND_PIL);
2867 ADD_INT_MACRO(mod, ND_GETBUF_FAIL);
2868 ADD_INT_MACRO(mod, ND_GETBUF_UNDEFINED);
2869 ADD_INT_MACRO(mod, ND_REDIRECT);
2870
2871 ADD_INT_MACRO(mod, PyBUF_SIMPLE);
2872 ADD_INT_MACRO(mod, PyBUF_WRITABLE);
2873 ADD_INT_MACRO(mod, PyBUF_FORMAT);
2874 ADD_INT_MACRO(mod, PyBUF_ND);
2875 ADD_INT_MACRO(mod, PyBUF_STRIDES);
2876 ADD_INT_MACRO(mod, PyBUF_INDIRECT);
2877 ADD_INT_MACRO(mod, PyBUF_C_CONTIGUOUS);
2878 ADD_INT_MACRO(mod, PyBUF_F_CONTIGUOUS);
2879 ADD_INT_MACRO(mod, PyBUF_ANY_CONTIGUOUS);
2880 ADD_INT_MACRO(mod, PyBUF_FULL);
2881 ADD_INT_MACRO(mod, PyBUF_FULL_RO);
2882 ADD_INT_MACRO(mod, PyBUF_RECORDS);
2883 ADD_INT_MACRO(mod, PyBUF_RECORDS_RO);
2884 ADD_INT_MACRO(mod, PyBUF_STRIDED);
2885 ADD_INT_MACRO(mod, PyBUF_STRIDED_RO);
2886 ADD_INT_MACRO(mod, PyBUF_CONTIG);
2887 ADD_INT_MACRO(mod, PyBUF_CONTIG_RO);
2888
2889 ADD_INT_MACRO(mod, PyBUF_READ);
2890 ADD_INT_MACRO(mod, PyBUF_WRITE);
2891
2892 #undef ADD_INT_MACRO
2893
2894 return 0;
2895 }
2896
2897 PyMODINIT_FUNC
2898 PyInit__testbuffer(void)
2899 {
2900 PyObject *mod = PyModule_Create(&_testbuffermodule);
2901 if (mod == NULL) {
2902 return NULL;
2903 }
2904 #ifdef Py_GIL_DISABLED
2905 PyUnstable_Module_SetGIL(mod, Py_MOD_GIL_NOT_USED);
2906 #endif
2907 if (_testbuffer_exec(mod) < 0) {
2908 Py_DECREF(mod);
2909 return NULL;
2910 }
2911 return mod;
2912 }
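/* Rough interactive usage once the module is built as part of CPython:

       >>> import _testbuffer
       >>> nd = _testbuffer.ndarray([1, 2, 3], shape=[3])
       >>> _testbuffer.is_contiguous(nd, 'C')
       True
*/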
2913