/* C Extension module to test all aspects of PEP-3118.
   Written by Stefan Krah. */

#define PY_SSIZE_T_CLEAN
#include "Python.h"

/* struct module */
PyObject *structmodule = NULL;
PyObject *Struct = NULL;
PyObject *calcsize = NULL;

/* cache simple format string */
static const char *simple_fmt = "B";
PyObject *simple_format = NULL;
#define SIMPLE_FORMAT(fmt) (fmt == NULL || strcmp(fmt, "B") == 0)
#define FIX_FORMAT(fmt) (fmt == NULL ? "B" : fmt)


/**************************************************************************/
/*                            NDArray Object                              */
/**************************************************************************/

static PyTypeObject NDArray_Type;
#define NDArray_Check(v) (Py_TYPE(v) == &NDArray_Type)

#define CHECK_LIST_OR_TUPLE(v) \
    if (!PyList_Check(v) && !PyTuple_Check(v)) { \
        PyErr_SetString(PyExc_TypeError,         \
            #v " must be a list or a tuple");    \
        return NULL;                             \
    }                                            \

#define PyMem_XFree(v) \
    do { if (v) PyMem_Free(v); } while (0)

/* Maximum number of dimensions. */
#define ND_MAX_NDIM (2 * PyBUF_MAX_NDIM)

/* Check for the presence of suboffsets in the first dimension. */
#define HAVE_PTR(suboffsets) (suboffsets && suboffsets[0] >= 0)
/* Adjust ptr if suboffsets are present. */
#define ADJUST_PTR(ptr, suboffsets) \
    (HAVE_PTR(suboffsets) ? *((char**)ptr) + suboffsets[0] : ptr)

/* Default: NumPy style (strides), read-only, no var-export, C-style layout */
#define ND_DEFAULT          0x000
/* User configurable flags for the ndarray */
#define ND_VAREXPORT        0x001   /* change layout while buffers are exported */
/* User configurable flags for each base buffer */
#define ND_WRITABLE         0x002   /* mark base buffer as writable */
#define ND_FORTRAN          0x004   /* Fortran contiguous layout */
#define ND_SCALAR           0x008   /* scalar: ndim = 0 */
#define ND_PIL              0x010   /* convert to PIL-style array (suboffsets) */
#define ND_REDIRECT         0x020   /* redirect buffer requests */
#define ND_GETBUF_FAIL      0x040   /* trigger getbuffer failure */
#define ND_GETBUF_UNDEFINED 0x080   /* undefined view.obj */
/* Internal flags for the base buffer */
#define ND_C                0x100   /* C contiguous layout (default) */
#define ND_OWN_ARRAYS       0x200   /* consumer owns arrays */

/* ndarray properties */
#define ND_IS_CONSUMER(nd) \
    (((NDArrayObject *)nd)->head == &((NDArrayObject *)nd)->staticbuf)

/* ndbuf->flags properties */
#define ND_C_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_C)))
#define ND_FORTRAN_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_FORTRAN)))
#define ND_ANY_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_C|ND_FORTRAN)))

/* getbuffer() requests */
#define REQ_INDIRECT(flags) ((flags&PyBUF_INDIRECT) == PyBUF_INDIRECT)
#define REQ_C_CONTIGUOUS(flags) ((flags&PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS)
#define REQ_F_CONTIGUOUS(flags) ((flags&PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS)
#define REQ_ANY_CONTIGUOUS(flags) ((flags&PyBUF_ANY_CONTIGUOUS) == PyBUF_ANY_CONTIGUOUS)
#define REQ_STRIDES(flags) ((flags&PyBUF_STRIDES) == PyBUF_STRIDES)
#define REQ_SHAPE(flags) ((flags&PyBUF_ND) == PyBUF_ND)
#define REQ_WRITABLE(flags) (flags&PyBUF_WRITABLE)
#define REQ_FORMAT(flags) (flags&PyBUF_FORMAT)


/* Single node of a list of base buffers. The list is needed to implement
   changes in memory layout while exported buffers are active. */
static PyTypeObject NDArray_Type;

struct ndbuf;
typedef struct ndbuf {
    struct ndbuf *next;
    struct ndbuf *prev;
    Py_ssize_t len;     /* length of data */
    Py_ssize_t offset;  /* start of the array relative to data */
    char *data;         /* raw data */
    int flags;          /* capabilities of the base buffer */
    Py_ssize_t exports; /* number of exports */
    Py_buffer base;     /* base buffer */
} ndbuf_t;

typedef struct {
    PyObject_HEAD
    int flags;          /* ndarray flags */
    ndbuf_t staticbuf;  /* static buffer for re-exporting mode */
    ndbuf_t *head;      /* currently active base buffer */
} NDArrayObject;


static ndbuf_t *
ndbuf_new(Py_ssize_t nitems, Py_ssize_t itemsize, Py_ssize_t offset, int flags)
{
    ndbuf_t *ndbuf;
    Py_buffer *base;
    Py_ssize_t len;

    len = nitems * itemsize;
    if (offset % itemsize) {
        PyErr_SetString(PyExc_ValueError,
            "offset must be a multiple of itemsize");
        return NULL;
    }
    if (offset < 0 || offset+itemsize > len) {
        PyErr_SetString(PyExc_ValueError, "offset out of bounds");
        return NULL;
    }

    ndbuf = PyMem_Malloc(sizeof *ndbuf);
    if (ndbuf == NULL) {
        PyErr_NoMemory();
        return NULL;
    }

    ndbuf->next = NULL;
    ndbuf->prev = NULL;
    ndbuf->len = len;
    ndbuf->offset = offset;

    ndbuf->data = PyMem_Malloc(len);
    if (ndbuf->data == NULL) {
        PyErr_NoMemory();
        PyMem_Free(ndbuf);
        return NULL;
    }

    ndbuf->flags = flags;
    ndbuf->exports = 0;

    base = &ndbuf->base;
    base->obj = NULL;
    base->buf = ndbuf->data;
    base->len = len;
    base->itemsize = 1;
    base->readonly = 0;
    base->format = NULL;
    base->ndim = 1;
    base->shape = NULL;
    base->strides = NULL;
    base->suboffsets = NULL;
    base->internal = ndbuf;

    return ndbuf;
}

static void
ndbuf_free(ndbuf_t *ndbuf)
{
    Py_buffer *base = &ndbuf->base;

    PyMem_XFree(ndbuf->data);
    PyMem_XFree(base->format);
    PyMem_XFree(base->shape);
    PyMem_XFree(base->strides);
    PyMem_XFree(base->suboffsets);

    PyMem_Free(ndbuf);
}

static void
ndbuf_push(NDArrayObject *nd, ndbuf_t *elt)
{
    elt->next = nd->head;
    if (nd->head) nd->head->prev = elt;
    nd->head = elt;
    elt->prev = NULL;
}

static void
ndbuf_delete(NDArrayObject *nd, ndbuf_t *elt)
{
    if (elt->prev)
        elt->prev->next = elt->next;
    else
        nd->head = elt->next;

    if (elt->next)
        elt->next->prev = elt->prev;

    ndbuf_free(elt);
}

static void
ndbuf_pop(NDArrayObject *nd)
{
    ndbuf_delete(nd, nd->head);
}


static PyObject *
ndarray_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    NDArrayObject *nd;

    nd = PyObject_New(NDArrayObject, &NDArray_Type);
    if (nd == NULL)
        return NULL;

    nd->flags = 0;
    nd->head = NULL;
    return (PyObject *)nd;
}

static void
ndarray_dealloc(NDArrayObject *self)
{
    if (self->head) {
        if (ND_IS_CONSUMER(self)) {
            Py_buffer *base = &self->head->base;
            if (self->head->flags & ND_OWN_ARRAYS) {
                PyMem_XFree(base->shape);
                PyMem_XFree(base->strides);
                PyMem_XFree(base->suboffsets);
            }
            PyBuffer_Release(base);
        }
        else {
            while (self->head)
                ndbuf_pop(self);
        }
    }
    PyObject_Del(self);
}

static int
ndarray_init_staticbuf(PyObject *exporter, NDArrayObject *nd, int flags)
{
    Py_buffer *base = &nd->staticbuf.base;

    if (PyObject_GetBuffer(exporter, base, flags) < 0)
        return -1;

    nd->head = &nd->staticbuf;

    nd->head->next = NULL;
    nd->head->prev = NULL;
    nd->head->len = -1;
    nd->head->offset = -1;
    nd->head->data = NULL;

    nd->head->flags = base->readonly ? 0 : ND_WRITABLE;
    nd->head->exports = 0;

    return 0;
}

static void
init_flags(ndbuf_t *ndbuf)
{
    if (ndbuf->base.ndim == 0)
        ndbuf->flags |= ND_SCALAR;
    if (ndbuf->base.suboffsets)
        ndbuf->flags |= ND_PIL;
    if (PyBuffer_IsContiguous(&ndbuf->base, 'C'))
        ndbuf->flags |= ND_C;
    if (PyBuffer_IsContiguous(&ndbuf->base, 'F'))
        ndbuf->flags |= ND_FORTRAN;
}
/****************************************************************************/
/*                         Buffer/List conversions                          */
/****************************************************************************/

static Py_ssize_t *strides_from_shape(const ndbuf_t *, int flags);

/* Get number of members in a struct: see issue #12740 */
typedef struct {
    PyObject_HEAD
    Py_ssize_t s_size;
    Py_ssize_t s_len;
} PyPartialStructObject;

static Py_ssize_t
get_nmemb(PyObject *s)
{
    return ((PyPartialStructObject *)s)->s_len;
}

/* Pack all items into the buffer of 'obj'. The 'format' parameter must be
   in struct module syntax. For standard C types, a single item is an integer.
   For compound types, a single item is a tuple of integers. */
static int
pack_from_list(PyObject *obj, PyObject *items, PyObject *format,
               Py_ssize_t itemsize)
{
    PyObject *structobj, *pack_into;
    PyObject *args, *offset;
    PyObject *item, *tmp;
    Py_ssize_t nitems; /* number of items */
    Py_ssize_t nmemb;  /* number of members in a single item */
    Py_ssize_t i, j;
    int ret = 0;

    assert(PyObject_CheckBuffer(obj));
    assert(PyList_Check(items) || PyTuple_Check(items));

    structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
    if (structobj == NULL)
        return -1;

    nitems = PySequence_Fast_GET_SIZE(items);
    nmemb = get_nmemb(structobj);
    assert(nmemb >= 1);

    pack_into = PyObject_GetAttrString(structobj, "pack_into");
    if (pack_into == NULL) {
        Py_DECREF(structobj);
        return -1;
    }

    /* nmemb >= 1 */
    args = PyTuple_New(2 + nmemb);
    if (args == NULL) {
        Py_DECREF(pack_into);
        Py_DECREF(structobj);
        return -1;
    }

    offset = NULL;
    for (i = 0; i < nitems; i++) {
        /* Loop invariant: args[j] are borrowed references or NULL. */
        PyTuple_SET_ITEM(args, 0, obj);
        for (j = 1; j < 2+nmemb; j++)
            PyTuple_SET_ITEM(args, j, NULL);

        Py_XDECREF(offset);
        offset = PyLong_FromSsize_t(i*itemsize);
        if (offset == NULL) {
            ret = -1;
            break;
        }
        PyTuple_SET_ITEM(args, 1, offset);

        item = PySequence_Fast_GET_ITEM(items, i);
        if ((PyBytes_Check(item) || PyLong_Check(item) ||
             PyFloat_Check(item)) && nmemb == 1) {
            PyTuple_SET_ITEM(args, 2, item);
        }
        else if ((PyList_Check(item) || PyTuple_Check(item)) &&
                 PySequence_Length(item) == nmemb) {
            for (j = 0; j < nmemb; j++) {
                tmp = PySequence_Fast_GET_ITEM(item, j);
                PyTuple_SET_ITEM(args, 2+j, tmp);
            }
        }
        else {
            PyErr_SetString(PyExc_ValueError,
                "mismatch between initializer element and format string");
            ret = -1;
            break;
        }

        tmp = PyObject_CallObject(pack_into, args);
        if (tmp == NULL) {
            ret = -1;
            break;
        }
        Py_DECREF(tmp);
    }

    Py_INCREF(obj); /* args[0] */
    /* args[1]: offset is either NULL or should be dealloc'd */
    for (i = 2; i < 2+nmemb; i++) {
        tmp = PyTuple_GET_ITEM(args, i);
        Py_XINCREF(tmp);
    }
    Py_DECREF(args);

    Py_DECREF(pack_into);
    Py_DECREF(structobj);
    return ret;
}

/* Pack single element */
static int
pack_single(char *ptr, PyObject *item, const char *fmt, Py_ssize_t itemsize)
{
    PyObject *structobj = NULL, *pack_into = NULL, *args = NULL;
    PyObject *format = NULL, *mview = NULL, *zero = NULL;
    Py_ssize_t i, nmemb;
    int ret = -1;
    PyObject *x;

    if (fmt == NULL) fmt = "B";

    format = PyUnicode_FromString(fmt);
    if (format == NULL)
        goto out;

    structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
    if (structobj == NULL)
        goto out;

    nmemb = get_nmemb(structobj);
    assert(nmemb >= 1);

    mview = PyMemoryView_FromMemory(ptr, itemsize, PyBUF_WRITE);
    if (mview == NULL)
        goto out;

    zero = PyLong_FromLong(0);
    if (zero == NULL)
        goto out;

    pack_into = PyObject_GetAttrString(structobj, "pack_into");
    if (pack_into == NULL)
        goto out;

    args = PyTuple_New(2+nmemb);
    if (args == NULL)
        goto out;

    PyTuple_SET_ITEM(args, 0, mview);
    PyTuple_SET_ITEM(args, 1, zero);

    if ((PyBytes_Check(item) || PyLong_Check(item) ||
         PyFloat_Check(item)) && nmemb == 1) {
        PyTuple_SET_ITEM(args, 2, item);
    }
    else if ((PyList_Check(item) || PyTuple_Check(item)) &&
             PySequence_Length(item) == nmemb) {
        for (i = 0; i < nmemb; i++) {
            x = PySequence_Fast_GET_ITEM(item, i);
            PyTuple_SET_ITEM(args, 2+i, x);
        }
    }
    else {
        PyErr_SetString(PyExc_ValueError,
            "mismatch between initializer element and format string");
        goto args_out;
    }

    x = PyObject_CallObject(pack_into, args);
    if (x != NULL) {
        Py_DECREF(x);
        ret = 0;
    }

args_out:
    for (i = 0; i < 2+nmemb; i++)
        Py_XINCREF(PyTuple_GET_ITEM(args, i));
    Py_XDECREF(args);
out:
    Py_XDECREF(pack_into);
    Py_XDECREF(zero);
    Py_XDECREF(mview);
    Py_XDECREF(structobj);
    Py_XDECREF(format);
    return ret;
}

static void
copy_rec(const Py_ssize_t *shape, Py_ssize_t ndim, Py_ssize_t itemsize,
         char *dptr, const Py_ssize_t *dstrides, const Py_ssize_t *dsuboffsets,
         char *sptr, const Py_ssize_t *sstrides, const Py_ssize_t *ssuboffsets,
         char *mem)
{
    Py_ssize_t i;

    assert(ndim >= 1);

    if (ndim == 1) {
        if (!HAVE_PTR(dsuboffsets) && !HAVE_PTR(ssuboffsets) &&
            dstrides[0] == itemsize && sstrides[0] == itemsize) {
            memmove(dptr, sptr, shape[0] * itemsize);
        }
        else {
            char *p;
            assert(mem != NULL);
            for (i=0, p=mem; i<shape[0]; p+=itemsize, sptr+=sstrides[0], i++) {
                char *xsptr = ADJUST_PTR(sptr, ssuboffsets);
                memcpy(p, xsptr, itemsize);
            }
            for (i=0, p=mem; i<shape[0]; p+=itemsize, dptr+=dstrides[0], i++) {
                char *xdptr = ADJUST_PTR(dptr, dsuboffsets);
                memcpy(xdptr, p, itemsize);
            }
        }
        return;
    }

    for (i = 0; i < shape[0]; dptr+=dstrides[0], sptr+=sstrides[0], i++) {
        char *xdptr = ADJUST_PTR(dptr, dsuboffsets);
        char *xsptr = ADJUST_PTR(sptr, ssuboffsets);

        copy_rec(shape+1, ndim-1, itemsize,
                 xdptr, dstrides+1, dsuboffsets ? dsuboffsets+1 : NULL,
                 xsptr, sstrides+1, ssuboffsets ? ssuboffsets+1 : NULL,
                 mem);
    }
}

static int
cmp_structure(Py_buffer *dest, Py_buffer *src)
{
    Py_ssize_t i;

    if (strcmp(FIX_FORMAT(dest->format), FIX_FORMAT(src->format)) != 0 ||
        dest->itemsize != src->itemsize ||
        dest->ndim != src->ndim)
        return -1;

    for (i = 0; i < dest->ndim; i++) {
        if (dest->shape[i] != src->shape[i])
            return -1;
        if (dest->shape[i] == 0)
            break;
    }

    return 0;
}
/* Copy src to dest. Both buffers must have the same format, itemsize,
   ndim and shape. Copying is atomic, the function never fails with
   a partial copy. */
static int
copy_buffer(Py_buffer *dest, Py_buffer *src)
{
    char *mem = NULL;

    assert(dest->ndim > 0);

    if (cmp_structure(dest, src) < 0) {
        PyErr_SetString(PyExc_ValueError,
            "ndarray assignment: lvalue and rvalue have different structures");
        return -1;
    }

    if ((dest->suboffsets && dest->suboffsets[dest->ndim-1] >= 0) ||
        (src->suboffsets && src->suboffsets[src->ndim-1] >= 0) ||
        dest->strides[dest->ndim-1] != dest->itemsize ||
        src->strides[src->ndim-1] != src->itemsize) {
        mem = PyMem_Malloc(dest->shape[dest->ndim-1] * dest->itemsize);
        if (mem == NULL) {
            PyErr_NoMemory();
            return -1;
        }
    }

    copy_rec(dest->shape, dest->ndim, dest->itemsize,
             dest->buf, dest->strides, dest->suboffsets,
             src->buf, src->strides, src->suboffsets,
             mem);

    PyMem_XFree(mem);
    return 0;
}

/* Unpack single element */
static PyObject *
unpack_single(char *ptr, const char *fmt, Py_ssize_t itemsize)
{
    PyObject *x, *unpack_from, *mview;

    if (fmt == NULL) {
        fmt = "B";
        itemsize = 1;
    }

    unpack_from = PyObject_GetAttrString(structmodule, "unpack_from");
    if (unpack_from == NULL)
        return NULL;

    mview = PyMemoryView_FromMemory(ptr, itemsize, PyBUF_READ);
    if (mview == NULL) {
        Py_DECREF(unpack_from);
        return NULL;
    }

    x = PyObject_CallFunction(unpack_from, "sO", fmt, mview);
    Py_DECREF(unpack_from);
    Py_DECREF(mview);
    if (x == NULL)
        return NULL;

    if (PyTuple_GET_SIZE(x) == 1) {
        PyObject *tmp = PyTuple_GET_ITEM(x, 0);
        Py_INCREF(tmp);
        Py_DECREF(x);
        return tmp;
    }

    return x;
}

/* Unpack a multi-dimensional matrix into a nested list. Return a scalar
   for ndim = 0. */
static PyObject *
unpack_rec(PyObject *unpack_from, char *ptr, PyObject *mview, char *item,
           const Py_ssize_t *shape, const Py_ssize_t *strides,
           const Py_ssize_t *suboffsets, Py_ssize_t ndim, Py_ssize_t itemsize)
{
    PyObject *lst, *x;
    Py_ssize_t i;

    assert(ndim >= 0);
    assert(shape != NULL);
    assert(strides != NULL);

    if (ndim == 0) {
        memcpy(item, ptr, itemsize);
        x = PyObject_CallFunctionObjArgs(unpack_from, mview, NULL);
        if (x == NULL)
            return NULL;
        if (PyTuple_GET_SIZE(x) == 1) {
            PyObject *tmp = PyTuple_GET_ITEM(x, 0);
            Py_INCREF(tmp);
            Py_DECREF(x);
            return tmp;
        }
        return x;
    }

    lst = PyList_New(shape[0]);
    if (lst == NULL)
        return NULL;

    for (i = 0; i < shape[0]; ptr+=strides[0], i++) {
        char *nextptr = ADJUST_PTR(ptr, suboffsets);

        x = unpack_rec(unpack_from, nextptr, mview, item,
                       shape+1, strides+1, suboffsets ? suboffsets+1 : NULL,
                       ndim-1, itemsize);
        if (x == NULL) {
            Py_DECREF(lst);
            return NULL;
        }

        PyList_SET_ITEM(lst, i, x);
    }

    return lst;
}

static PyObject *
ndarray_as_list(NDArrayObject *nd)
{
    PyObject *structobj = NULL, *unpack_from = NULL;
    PyObject *lst = NULL, *mview = NULL;
    Py_buffer *base = &nd->head->base;
    Py_ssize_t *shape = base->shape;
    Py_ssize_t *strides = base->strides;
    Py_ssize_t simple_shape[1];
    Py_ssize_t simple_strides[1];
    char *item = NULL;
    PyObject *format;
    char *fmt = base->format;

    base = &nd->head->base;

    if (fmt == NULL) {
        PyErr_SetString(PyExc_ValueError,
            "ndarray: tolist() does not support format=NULL, use "
            "tobytes()");
        return NULL;
    }
    if (shape == NULL) {
        assert(ND_C_CONTIGUOUS(nd->head->flags));
        assert(base->strides == NULL);
        assert(base->ndim <= 1);
        shape = simple_shape;
        shape[0] = base->len;
        strides = simple_strides;
        strides[0] = base->itemsize;
    }
    else if (strides == NULL) {
        assert(ND_C_CONTIGUOUS(nd->head->flags));
        strides = strides_from_shape(nd->head, 0);
        if (strides == NULL)
            return NULL;
    }

    format = PyUnicode_FromString(fmt);
    if (format == NULL)
        goto out;

    structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
    Py_DECREF(format);
    if (structobj == NULL)
        goto out;

    unpack_from = PyObject_GetAttrString(structobj, "unpack_from");
    if (unpack_from == NULL)
        goto out;

    item = PyMem_Malloc(base->itemsize);
    if (item == NULL) {
        PyErr_NoMemory();
        goto out;
    }

    mview = PyMemoryView_FromMemory(item, base->itemsize, PyBUF_WRITE);
    if (mview == NULL)
        goto out;

    lst = unpack_rec(unpack_from, base->buf, mview, item,
                     shape, strides, base->suboffsets,
                     base->ndim, base->itemsize);

out:
    Py_XDECREF(mview);
    PyMem_XFree(item);
    Py_XDECREF(unpack_from);
    Py_XDECREF(structobj);
    if (strides != base->strides && strides != simple_strides)
        PyMem_XFree(strides);

    return lst;
}
/****************************************************************************/
/*                            Initialize ndbuf                              */
/****************************************************************************/

/*
   State of a new ndbuf during initialization. 'OK' means that initialization
   is complete. 'PTR' means that a pointer has been initialized, but the
   state of the memory is still undefined and ndbuf->offset is disregarded.

  +-----------------+-----------+-------------+----------------+
  |                 | ndbuf_new | init_simple | init_structure |
  +-----------------+-----------+-------------+----------------+
  | next            | OK (NULL) | OK          | OK             |
  +-----------------+-----------+-------------+----------------+
  | prev            | OK (NULL) | OK          | OK             |
  +-----------------+-----------+-------------+----------------+
  | len             | OK        | OK          | OK             |
  +-----------------+-----------+-------------+----------------+
  | offset          | OK        | OK          | OK             |
  +-----------------+-----------+-------------+----------------+
  | data            | PTR       | OK          | OK             |
  +-----------------+-----------+-------------+----------------+
  | flags           | user      | user        | OK             |
  +-----------------+-----------+-------------+----------------+
  | exports         | OK (0)    | OK          | OK             |
  +-----------------+-----------+-------------+----------------+
  | base.obj        | OK (NULL) | OK          | OK             |
  +-----------------+-----------+-------------+----------------+
  | base.buf        | PTR       | PTR         | OK             |
  +-----------------+-----------+-------------+----------------+
  | base.len        | len(data) | len(data)   | OK             |
  +-----------------+-----------+-------------+----------------+
  | base.itemsize   | 1         | OK          | OK             |
  +-----------------+-----------+-------------+----------------+
  | base.readonly   | 0         | OK          | OK             |
  +-----------------+-----------+-------------+----------------+
  | base.format     | NULL      | OK          | OK             |
  +-----------------+-----------+-------------+----------------+
  | base.ndim       | 1         | 1           | OK             |
  +-----------------+-----------+-------------+----------------+
  | base.shape      | NULL      | NULL        | OK             |
  +-----------------+-----------+-------------+----------------+
  | base.strides    | NULL      | NULL        | OK             |
  +-----------------+-----------+-------------+----------------+
  | base.suboffsets | NULL      | NULL        | OK             |
  +-----------------+-----------+-------------+----------------+
  | base.internal   | OK        | OK          | OK             |
  +-----------------+-----------+-------------+----------------+
*/

static Py_ssize_t
get_itemsize(PyObject *format)
{
    PyObject *tmp;
    Py_ssize_t itemsize;

    tmp = PyObject_CallFunctionObjArgs(calcsize, format, NULL);
    if (tmp == NULL)
        return -1;
    itemsize = PyLong_AsSsize_t(tmp);
    Py_DECREF(tmp);

    return itemsize;
}

static char *
get_format(PyObject *format)
{
    PyObject *tmp;
    char *fmt;

    tmp = PyUnicode_AsASCIIString(format);
    if (tmp == NULL)
        return NULL;
    fmt = PyMem_Malloc(PyBytes_GET_SIZE(tmp)+1);
    if (fmt == NULL) {
        PyErr_NoMemory();
        Py_DECREF(tmp);
        return NULL;
    }
    strcpy(fmt, PyBytes_AS_STRING(tmp));
    Py_DECREF(tmp);

    return fmt;
}

static int
init_simple(ndbuf_t *ndbuf, PyObject *items, PyObject *format,
            Py_ssize_t itemsize)
{
    PyObject *mview;
    Py_buffer *base = &ndbuf->base;
    int ret;

    mview = PyMemoryView_FromBuffer(base);
    if (mview == NULL)
        return -1;

    ret = pack_from_list(mview, items, format, itemsize);
    Py_DECREF(mview);
    if (ret < 0)
        return -1;

    base->readonly = !(ndbuf->flags & ND_WRITABLE);
    base->itemsize = itemsize;
    base->format = get_format(format);
    if (base->format == NULL)
        return -1;

    return 0;
}

static Py_ssize_t *
seq_as_ssize_array(PyObject *seq, Py_ssize_t len, int is_shape)
{
    Py_ssize_t *dest;
    Py_ssize_t x, i;

    dest = PyMem_Malloc(len * (sizeof *dest));
    if (dest == NULL) {
        PyErr_NoMemory();
        return NULL;
    }

    for (i = 0; i < len; i++) {
        PyObject *tmp = PySequence_Fast_GET_ITEM(seq, i);
        if (!PyLong_Check(tmp)) {
            PyErr_Format(PyExc_ValueError,
                "elements of %s must be integers",
                is_shape ? "shape" : "strides");
            PyMem_Free(dest);
            return NULL;
        }
        x = PyLong_AsSsize_t(tmp);
        if (PyErr_Occurred()) {
            PyMem_Free(dest);
            return NULL;
        }
        if (is_shape && x < 0) {
            PyErr_Format(PyExc_ValueError,
                "elements of shape must be integers >= 0");
            PyMem_Free(dest);
            return NULL;
        }

        dest[i] = x;
    }

    return dest;
}

static Py_ssize_t *
strides_from_shape(const ndbuf_t *ndbuf, int flags)
{
    const Py_buffer *base = &ndbuf->base;
    Py_ssize_t *s, i;

    s = PyMem_Malloc(base->ndim * (sizeof *s));
    if (s == NULL) {
        PyErr_NoMemory();
        return NULL;
    }

    if (flags & ND_FORTRAN) {
        s[0] = base->itemsize;
        for (i = 1; i < base->ndim; i++)
            s[i] = s[i-1] * base->shape[i-1];
    }
    else {
        s[base->ndim-1] = base->itemsize;
        for (i = base->ndim-2; i >= 0; i--)
            s[i] = s[i+1] * base->shape[i+1];
    }

    return s;
}

/* Bounds check:

     len    := complete length of allocated memory
     offset := start of the array

   A single array element is indexed by:

     i = indices[0] * strides[0] + indices[1] * strides[1] + ...

   imin is reached when all indices[n] combined with positive strides are 0
   and all indices combined with negative strides are shape[n]-1, which is
   the maximum index for the nth dimension.

   imax is reached when all indices[n] combined with negative strides are 0
   and all indices combined with positive strides are shape[n]-1.
*/
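/* For illustration, assume itemsize = 1, offset = 3, shape = {2, 3} and
   strides = {-3, 1} (example values, not taken from the tests). The
   negative stride contributes to imin and the positive one to imax:

     imin = (2-1) * (-3) = -3
     imax = (3-1) *   1  =  2

   so the addressed bytes lie in [offset + imin, offset + imax + itemsize)
   = [0, 6), which verify_structure() below accepts for any len >= 6. */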
static int
verify_structure(Py_ssize_t len, Py_ssize_t itemsize, Py_ssize_t offset,
                 const Py_ssize_t *shape, const Py_ssize_t *strides,
                 Py_ssize_t ndim)
{
    Py_ssize_t imin, imax;
    Py_ssize_t n;

    assert(ndim >= 0);

    if (ndim == 0 && (offset < 0 || offset+itemsize > len))
        goto invalid_combination;

    for (n = 0; n < ndim; n++)
        if (strides[n] % itemsize) {
            PyErr_SetString(PyExc_ValueError,
                "strides must be a multiple of itemsize");
            return -1;
        }

    for (n = 0; n < ndim; n++)
        if (shape[n] == 0)
            return 0;

    imin = imax = 0;
    for (n = 0; n < ndim; n++)
        if (strides[n] <= 0)
            imin += (shape[n]-1) * strides[n];
        else
            imax += (shape[n]-1) * strides[n];

    if (imin + offset < 0 || imax + offset + itemsize > len)
        goto invalid_combination;

    return 0;

invalid_combination:
    PyErr_SetString(PyExc_ValueError,
        "invalid combination of buffer, shape and strides");
    return -1;
}

/*
   Convert a NumPy-style array to an array using suboffsets to stride in
   the first dimension. Requirements: ndim > 0.

   Contiguous example
   ==================

     Input:
     ------
       shape      = {2, 2, 3};
       strides    = {6, 3, 1};
       suboffsets = NULL;
       data       = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
       buf        = &data[0]

     Output:
     -------
       shape      = {2, 2, 3};
       strides    = {sizeof(char *), 3, 1};
       suboffsets = {0, -1, -1};
       data       = {p1, p2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
                     |   |   ^                 ^
                     `---'---'                 |
                         |                     |
                         `---------------------'
       buf        = &data[0]

   So, in the example the input resembles the three-dimensional array
   char v[2][2][3], while the output resembles an array of two pointers
   to two-dimensional arrays: char (*v[2])[2][3].

   Non-contiguous example:
   =======================

     Input (with offset and negative strides):
     -----------------------------------------
       shape      = {2, 2, 3};
       strides    = {-6, 3, -1};
       offset     = 8
       suboffsets = NULL;
       data       = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};

     Output:
     -------
       shape      = {2, 2, 3};
       strides    = {-sizeof(char *), 3, -1};
       suboffsets = {2, -1, -1};
       newdata    = {p1, p2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
                     |   |   ^     ^           ^     ^
                     `---'---'     |           |     `- p2+suboffsets[0]
                         |         `-----------|--- p1+suboffsets[0]
                         `---------------------'
       buf        = &newdata[1]  # striding backwards over the pointers.

   suboffsets[0] is the same as the offset that one would specify if
   the two {2, 3} subarrays were created directly, hence the name.
*/
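/* Addressing after the conversion follows the PEP-3118 suboffset scheme:
   an element [i][j][k] of the output is reached by first computing

       p = (char *)buf + i * strides[0];

   and then, because suboffsets[0] >= 0, dereferencing and adding the
   suboffset before applying the remaining dimensions:

       item = *(char **)p + suboffsets[0] + j * strides[1] + k * strides[2];

   This first-dimension step is what the ADJUST_PTR() macro above does. */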
static int
init_suboffsets(ndbuf_t *ndbuf)
{
    Py_buffer *base = &ndbuf->base;
    Py_ssize_t start, step;
    Py_ssize_t imin, suboffset0;
    Py_ssize_t addsize;
    Py_ssize_t n;
    char *data;

    assert(base->ndim > 0);
    assert(base->suboffsets == NULL);

    /* Allocate new data with additional space for shape[0] pointers. */
    addsize = base->shape[0] * (sizeof (char *));

    /* Align array start to a multiple of 8. */
    addsize = 8 * ((addsize + 7) / 8);

    data = PyMem_Malloc(ndbuf->len + addsize);
    if (data == NULL) {
        PyErr_NoMemory();
        return -1;
    }

    memcpy(data + addsize, ndbuf->data, ndbuf->len);

    PyMem_Free(ndbuf->data);
    ndbuf->data = data;
    ndbuf->len += addsize;
    base->buf = ndbuf->data;

    /* imin: minimum index of the input array relative to ndbuf->offset.
       suboffset0: offset for each sub-array of the output. This is the
       same as calculating -imin' for a sub-array of ndim-1. */
    imin = suboffset0 = 0;
    for (n = 0; n < base->ndim; n++) {
        if (base->shape[n] == 0)
            break;
        if (base->strides[n] <= 0) {
            Py_ssize_t x = (base->shape[n]-1) * base->strides[n];
            imin += x;
            suboffset0 += (n >= 1) ? -x : 0;
        }
    }

    /* Initialize the array of pointers to the sub-arrays. */
    start = addsize + ndbuf->offset + imin;
    step = base->strides[0] < 0 ? -base->strides[0] : base->strides[0];

    for (n = 0; n < base->shape[0]; n++)
        ((char **)base->buf)[n] = (char *)base->buf + start + n*step;

    /* Initialize suboffsets. */
    base->suboffsets = PyMem_Malloc(base->ndim * (sizeof *base->suboffsets));
    if (base->suboffsets == NULL) {
        PyErr_NoMemory();
        return -1;
    }
    base->suboffsets[0] = suboffset0;
    for (n = 1; n < base->ndim; n++)
        base->suboffsets[n] = -1;

    /* Adjust strides for the first (zeroth) dimension. */
    if (base->strides[0] >= 0) {
        base->strides[0] = sizeof(char *);
    }
    else {
        /* Striding backwards. */
        base->strides[0] = -(Py_ssize_t)sizeof(char *);
        if (base->shape[0] > 0)
            base->buf = (char *)base->buf + (base->shape[0]-1) * sizeof(char *);
    }

    ndbuf->flags &= ~(ND_C|ND_FORTRAN);
    ndbuf->offset = 0;
    return 0;
}

static void
init_len(Py_buffer *base)
{
    Py_ssize_t i;

    base->len = 1;
    for (i = 0; i < base->ndim; i++)
        base->len *= base->shape[i];
    base->len *= base->itemsize;
}

static int
init_structure(ndbuf_t *ndbuf, PyObject *shape, PyObject *strides,
               Py_ssize_t ndim)
{
    Py_buffer *base = &ndbuf->base;

    base->ndim = (int)ndim;
    if (ndim == 0) {
        if (ndbuf->flags & ND_PIL) {
            PyErr_SetString(PyExc_TypeError,
                "ndim = 0 cannot be used in conjunction with ND_PIL");
            return -1;
        }
        ndbuf->flags |= (ND_SCALAR|ND_C|ND_FORTRAN);
        return 0;
    }

    /* shape */
    base->shape = seq_as_ssize_array(shape, ndim, 1);
    if (base->shape == NULL)
        return -1;

    /* strides */
    if (strides) {
        base->strides = seq_as_ssize_array(strides, ndim, 0);
    }
    else {
        base->strides = strides_from_shape(ndbuf, ndbuf->flags);
    }
    if (base->strides == NULL)
        return -1;
    if (verify_structure(base->len, base->itemsize, ndbuf->offset,
                         base->shape, base->strides, ndim) < 0)
        return -1;

    /* buf */
    base->buf = ndbuf->data + ndbuf->offset;

    /* len */
    init_len(base);

    /* ndbuf->flags */
    if (PyBuffer_IsContiguous(base, 'C'))
        ndbuf->flags |= ND_C;
    if (PyBuffer_IsContiguous(base, 'F'))
        ndbuf->flags |= ND_FORTRAN;

    /* convert numpy array to suboffset representation */
    if (ndbuf->flags & ND_PIL) {
        /* modifies base->buf, base->strides and base->suboffsets **/
        return init_suboffsets(ndbuf);
    }

    return 0;
}

static ndbuf_t *
init_ndbuf(PyObject *items, PyObject *shape, PyObject *strides,
           Py_ssize_t offset, PyObject *format, int flags)
{
    ndbuf_t *ndbuf;
    Py_ssize_t ndim;
    Py_ssize_t nitems;
    Py_ssize_t itemsize;

    /* ndim = len(shape) */
    CHECK_LIST_OR_TUPLE(shape)
    ndim = PySequence_Fast_GET_SIZE(shape);
    if (ndim > ND_MAX_NDIM) {
        PyErr_Format(PyExc_ValueError,
            "ndim must not exceed %d", ND_MAX_NDIM);
        return NULL;
    }

    /* len(strides) = len(shape) */
    if (strides) {
        CHECK_LIST_OR_TUPLE(strides)
        if (PySequence_Fast_GET_SIZE(strides) == 0)
            strides = NULL;
        else if (flags & ND_FORTRAN) {
            PyErr_SetString(PyExc_TypeError,
                "ND_FORTRAN cannot be used together with strides");
            return NULL;
        }
        else if (PySequence_Fast_GET_SIZE(strides) != ndim) {
            PyErr_SetString(PyExc_ValueError,
                "len(shape) != len(strides)");
            return NULL;
        }
    }

    /* itemsize */
    itemsize = get_itemsize(format);
    if (itemsize <= 0) {
        if (itemsize == 0) {
            PyErr_SetString(PyExc_ValueError,
                "itemsize must not be zero");
        }
        return NULL;
    }

    /* convert scalar to list */
    if (ndim == 0) {
        items = Py_BuildValue("(O)", items);
        if (items == NULL)
            return NULL;
    }
    else {
        CHECK_LIST_OR_TUPLE(items)
        Py_INCREF(items);
    }

    /* number of items */
    nitems = PySequence_Fast_GET_SIZE(items);
    if (nitems == 0) {
        PyErr_SetString(PyExc_ValueError,
            "initializer list or tuple must not be empty");
        Py_DECREF(items);
        return NULL;
    }

    ndbuf = ndbuf_new(nitems, itemsize, offset, flags);
    if (ndbuf == NULL) {
        Py_DECREF(items);
        return NULL;
    }

    if (init_simple(ndbuf, items, format, itemsize) < 0)
        goto error;
    if (init_structure(ndbuf, shape, strides, ndim) < 0)
        goto error;

    Py_DECREF(items);
    return ndbuf;

error:
    Py_DECREF(items);
    ndbuf_free(ndbuf);
    return NULL;
}

/* initialize and push a new base onto the linked list */
static int
ndarray_push_base(NDArrayObject *nd, PyObject *items,
                  PyObject *shape, PyObject *strides,
                  Py_ssize_t offset, PyObject *format, int flags)
{
    ndbuf_t *ndbuf;

    ndbuf = init_ndbuf(items, shape, strides, offset, format, flags);
    if (ndbuf == NULL)
        return -1;

    ndbuf_push(nd, ndbuf);
    return 0;
}

#define PyBUF_UNUSED 0x10000
static int
ndarray_init(PyObject *self, PyObject *args, PyObject *kwds)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    static char *kwlist[] = {
        "obj", "shape", "strides", "offset", "format", "flags", "getbuf", NULL
    };
    PyObject *v = NULL;        /* initializer: scalar, list, tuple or base object */
    PyObject *shape = NULL;    /* size of each dimension */
    PyObject *strides = NULL;  /* number of bytes to the next elt in each dim */
    Py_ssize_t offset = 0;     /* buffer offset */
    PyObject *format = simple_format; /* struct module specifier: "B" */
    int flags = ND_DEFAULT;    /* base buffer and ndarray flags */
    int getbuf = PyBUF_UNUSED; /* re-exporter: getbuffer request flags */

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|OOnOii", kwlist,
            &v, &shape, &strides, &offset, &format, &flags, &getbuf))
        return -1;

    /* NDArrayObject is re-exporter */
    if (PyObject_CheckBuffer(v) && shape == NULL) {
        if (strides || offset || format != simple_format ||
            !(flags == ND_DEFAULT || flags == ND_REDIRECT)) {
            PyErr_SetString(PyExc_TypeError,
                "construction from exporter object only takes 'obj', 'getbuf' "
                "and 'flags' arguments");
            return -1;
        }

        getbuf = (getbuf == PyBUF_UNUSED) ? PyBUF_FULL_RO : getbuf;

        if (ndarray_init_staticbuf(v, nd, getbuf) < 0)
            return -1;

        init_flags(nd->head);
        nd->head->flags |= flags;

        return 0;
    }

    /* NDArrayObject is the original base object. */
    if (getbuf != PyBUF_UNUSED) {
        PyErr_SetString(PyExc_TypeError,
            "getbuf argument only valid for construction from exporter "
            "object");
        return -1;
    }
    if (shape == NULL) {
        PyErr_SetString(PyExc_TypeError,
            "shape is a required argument when constructing from "
            "list, tuple or scalar");
        return -1;
    }

    if (flags & ND_VAREXPORT) {
        nd->flags |= ND_VAREXPORT;
        flags &= ~ND_VAREXPORT;
    }

    /* Initialize and push the first base buffer onto the linked list. */
    return ndarray_push_base(nd, v, shape, strides, offset, format, flags);
}
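/* Sketch of the two construction paths above as seen from Python, assuming
   the module is built as _testbuffer (as exercised by Lib/test/test_buffer.py):

       from _testbuffer import ndarray, ND_WRITABLE

       # original base object: initializer items plus an explicit shape
       nd = ndarray([1, 2, 3, 4, 5, 6], shape=[2, 3], format='B',
                    flags=ND_WRITABLE)

       # re-exporter: wraps any buffer provider via PyObject_GetBuffer();
       # 'getbuf' defaults to PyBUF_FULL_RO
       nd2 = ndarray(memoryview(nd))
*/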
/* Push an additional base onto the linked list. */
static PyObject *
ndarray_push(PyObject *self, PyObject *args, PyObject *kwds)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    static char *kwlist[] = {
        "items", "shape", "strides", "offset", "format", "flags", NULL
    };
    PyObject *items = NULL;    /* initializer: scalar, list or tuple */
    PyObject *shape = NULL;    /* size of each dimension */
    PyObject *strides = NULL;  /* number of bytes to the next elt in each dim */
    PyObject *format = simple_format;  /* struct module specifier: "B" */
    Py_ssize_t offset = 0;     /* buffer offset */
    int flags = ND_DEFAULT;    /* base buffer flags */

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO|OnOi", kwlist,
            &items, &shape, &strides, &offset, &format, &flags))
        return NULL;

    if (flags & ND_VAREXPORT) {
        PyErr_SetString(PyExc_ValueError,
            "ND_VAREXPORT flag can only be used during object creation");
        return NULL;
    }
    if (ND_IS_CONSUMER(nd)) {
        PyErr_SetString(PyExc_BufferError,
            "structure of re-exporting object is immutable");
        return NULL;
    }
    if (!(nd->flags&ND_VAREXPORT) && nd->head->exports > 0) {
        PyErr_Format(PyExc_BufferError,
            "cannot change structure: %zd exported buffer%s",
            nd->head->exports, nd->head->exports==1 ? "" : "s");
        return NULL;
    }

    if (ndarray_push_base(nd, items, shape, strides,
                          offset, format, flags) < 0)
        return NULL;

    Py_RETURN_NONE;
}

/* Pop a base from the linked list (if possible). */
static PyObject *
ndarray_pop(PyObject *self, PyObject *dummy)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    if (ND_IS_CONSUMER(nd)) {
        PyErr_SetString(PyExc_BufferError,
            "structure of re-exporting object is immutable");
        return NULL;
    }
    if (nd->head->exports > 0) {
        PyErr_Format(PyExc_BufferError,
            "cannot change structure: %zd exported buffer%s",
            nd->head->exports, nd->head->exports==1 ? "" : "s");
        return NULL;
    }
    if (nd->head->next == NULL) {
        PyErr_SetString(PyExc_BufferError,
            "list only has a single base");
        return NULL;
    }

    ndbuf_pop(nd);
    Py_RETURN_NONE;
}


/**************************************************************************/
/*                               getbuffer                                */
/**************************************************************************/

static int
ndarray_getbuf(NDArrayObject *self, Py_buffer *view, int flags)
{
    ndbuf_t *ndbuf = self->head;
    Py_buffer *base = &ndbuf->base;
    int baseflags = ndbuf->flags;

    /* redirect mode */
    if (base->obj != NULL && (baseflags&ND_REDIRECT)) {
        return PyObject_GetBuffer(base->obj, view, flags);
    }

    /* start with complete information */
    *view = *base;
    view->obj = NULL;

    /* reconstruct format */
    if (view->format == NULL)
        view->format = "B";

    if (base->ndim != 0 &&
        ((REQ_SHAPE(flags) && base->shape == NULL) ||
         (REQ_STRIDES(flags) && base->strides == NULL))) {
        /* The ndarray is a re-exporter that has been created without full
           information for testing purposes. In this particular case the
           ndarray is not a PEP-3118 compliant buffer provider. */
        PyErr_SetString(PyExc_BufferError,
            "re-exporter does not provide format, shape or strides");
        return -1;
    }

    if (baseflags & ND_GETBUF_FAIL) {
        PyErr_SetString(PyExc_BufferError,
            "ND_GETBUF_FAIL: forced test exception");
        if (baseflags & ND_GETBUF_UNDEFINED)
            view->obj = (PyObject *)0x1; /* wrong but permitted in <= 3.2 */
        return -1;
    }

    if (REQ_WRITABLE(flags) && base->readonly) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not writable");
        return -1;
    }
    if (!REQ_FORMAT(flags)) {
        /* NULL indicates that the buffer's data type has been cast to 'B'.
           view->itemsize is the _previous_ itemsize. If shape is present,
           the equality product(shape) * itemsize = len still holds at this
           point. The equality calcsize(format) = itemsize does _not_ hold
           from here on! */
        view->format = NULL;
    }

    if (REQ_C_CONTIGUOUS(flags) && !ND_C_CONTIGUOUS(baseflags)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not C-contiguous");
        return -1;
    }
    if (REQ_F_CONTIGUOUS(flags) && !ND_FORTRAN_CONTIGUOUS(baseflags)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not Fortran contiguous");
        return -1;
    }
    if (REQ_ANY_CONTIGUOUS(flags) && !ND_ANY_CONTIGUOUS(baseflags)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not contiguous");
        return -1;
    }
    if (!REQ_INDIRECT(flags) && (baseflags & ND_PIL)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray cannot be represented without suboffsets");
        return -1;
    }
    if (!REQ_STRIDES(flags)) {
        if (!ND_C_CONTIGUOUS(baseflags)) {
            PyErr_SetString(PyExc_BufferError,
                "ndarray is not C-contiguous");
            return -1;
        }
        view->strides = NULL;
    }
    if (!REQ_SHAPE(flags)) {
        /* PyBUF_SIMPLE or PyBUF_WRITABLE: at this point buf is C-contiguous,
           so base->buf = ndbuf->data. */
        if (view->format != NULL) {
            /* PyBUF_SIMPLE|PyBUF_FORMAT and PyBUF_WRITABLE|PyBUF_FORMAT do
               not make sense. */
            PyErr_Format(PyExc_BufferError,
                "ndarray: cannot cast to unsigned bytes if the format flag "
                "is present");
            return -1;
        }
        /* product(shape) * itemsize = len and calcsize(format) = itemsize
           do _not_ hold from here on! */
        view->ndim = 1;
        view->shape = NULL;
    }

    view->obj = (PyObject *)self;
    Py_INCREF(view->obj);
    self->head->exports++;

    return 0;
}

static int
ndarray_releasebuf(NDArrayObject *self, Py_buffer *view)
{
    if (!ND_IS_CONSUMER(self)) {
        ndbuf_t *ndbuf = view->internal;
        if (--ndbuf->exports == 0 && ndbuf != self->head)
            ndbuf_delete(self, ndbuf);
    }

    return 0;
}

static PyBufferProcs ndarray_as_buffer = {
    (getbufferproc)ndarray_getbuf,        /* bf_getbuffer */
    (releasebufferproc)ndarray_releasebuf /* bf_releasebuffer */
};


/**************************************************************************/
/*                           indexing/slicing                             */
/**************************************************************************/

static char *
ptr_from_index(Py_buffer *base, Py_ssize_t index)
{
    char *ptr;
    Py_ssize_t nitems; /* items in the first dimension */

    if (base->shape)
        nitems = base->shape[0];
    else {
        assert(base->ndim == 1 && SIMPLE_FORMAT(base->format));
        nitems = base->len;
    }

    if (index < 0) {
        index += nitems;
    }
    if (index < 0 || index >= nitems) {
        PyErr_SetString(PyExc_IndexError, "index out of bounds");
        return NULL;
    }

    ptr = (char *)base->buf;

    if (base->strides == NULL)
        ptr += base->itemsize * index;
    else
        ptr += base->strides[0] * index;

    ptr = ADJUST_PTR(ptr, base->suboffsets);

    return ptr;
}

static PyObject *
ndarray_item(NDArrayObject *self, Py_ssize_t index)
{
    ndbuf_t *ndbuf = self->head;
    Py_buffer *base = &ndbuf->base;
    char *ptr;

    if (base->ndim == 0) {
        PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
        return NULL;
    }

    ptr = ptr_from_index(base, index);
    if (ptr == NULL)
        return NULL;

    if (base->ndim == 1) {
        return unpack_single(ptr, base->format, base->itemsize);
    }
    else {
        NDArrayObject *nd;
        Py_buffer *subview;

        nd = (NDArrayObject *)ndarray_new(&NDArray_Type, NULL, NULL);
        if (nd == NULL)
            return NULL;

        if (ndarray_init_staticbuf((PyObject *)self, nd, PyBUF_FULL_RO) < 0) {
            Py_DECREF(nd);
            return NULL;
        }

        subview = &nd->staticbuf.base;

        subview->buf = ptr;
        subview->len /= subview->shape[0];

        subview->ndim--;
        subview->shape++;
        if (subview->strides) subview->strides++;
        if (subview->suboffsets) subview->suboffsets++;

        init_flags(&nd->staticbuf);

        return (PyObject *)nd;
    }
}

/*
  For each dimension, we get valid (start, stop, step, slicelength) quadruples
  from PySlice_GetIndicesEx().

  Slicing NumPy arrays
  ====================

    A pointer to an element in a NumPy array is defined by:

      ptr = (char *)buf + indices[0] * strides[0] +
                          ... +
                          indices[ndim-1] * strides[ndim-1]

    Adjust buf:
    -----------
      Adding start[n] for each dimension effectively adds the constant:

        c = start[0] * strides[0] + ... + start[ndim-1] * strides[ndim-1]

      Therefore init_slice() adds all start[n] directly to buf.

    Adjust shape:
    -------------
      Obviously shape[n] = slicelength[n].

    Adjust strides:
    ---------------
      In the original array, the next element in a dimension is reached
      by adding strides[n] to the pointer. In the sliced array, elements
      may be skipped, so the next element is reached by adding:

        strides[n] * step[n]

  Slicing PIL arrays
  ==================

    Layout:
    -------
      In the first (zeroth) dimension, PIL arrays have an array of pointers
      to sub-arrays of ndim-1. Striding in the first dimension is done by
      taking the nth pointer, dereferencing it and then adding a suboffset
      to it. The arrays pointed to can best be seen as regular NumPy arrays.

    Adjust buf:
    -----------
      In the original array, buf points to a location (usually the start)
      in the array of pointers. For the sliced array, start[0] can be
      added to buf in the same manner as for NumPy arrays.

    Adjust suboffsets:
    ------------------
      Due to the dereferencing step in the addressing scheme, it is not
      possible to adjust buf for higher dimensions. Recall that the
      sub-arrays pointed to are regular NumPy arrays, so for each of
      those arrays adding start[n] effectively adds the constant:

        c = start[1] * strides[1] + ... + start[ndim-1] * strides[ndim-1]

      This constant is added to suboffsets[0]. suboffsets[0] in turn is
      added to each pointer right after dereferencing.

    Adjust shape and strides:
    -------------------------
      Shape and strides are not influenced by the dereferencing step, so
      they are adjusted in the same manner as for NumPy arrays.

  Multiple levels of suboffsets
  =============================

    For a construct like an array of pointers to array of pointers to
    sub-arrays of ndim-2:

      suboffsets[0] = start[1] * strides[1]
      suboffsets[1] = start[2] * strides[2] + ...
*/
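/* Worked example for init_slice() below, assuming a one-dimensional
   NumPy-style array with shape = {10}, strides = {1}, itemsize = 1:
   slicing with [7:1:-2] yields start = 7, stop = 1, step = -2 and
   slicelength = 3 from PySlice_GetIndicesEx(), so init_slice() sets

       buf        += 7 * 1         (buf now points at element 7)
       shape[0]    = 3
       strides[0]  = 1 * (-2) = -2

   and the resulting view addresses elements 7, 5 and 3. */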
  1415. static int
  1416. init_slice(Py_buffer *base, PyObject *key, int dim)
  1417. {
  1418. Py_ssize_t start, stop, step, slicelength;
  1419. if (PySlice_GetIndicesEx(key, base->shape[dim],
  1420. &start, &stop, &step, &slicelength) < 0) {
  1421. return -1;
  1422. }
  1423. if (base->suboffsets == NULL || dim == 0) {
  1424. adjust_buf:
  1425. base->buf = (char *)base->buf + base->strides[dim] * start;
  1426. }
  1427. else {
  1428. Py_ssize_t n = dim-1;
  1429. while (n >= 0 && base->suboffsets[n] < 0)
  1430. n--;
  1431. if (n < 0)
  1432. goto adjust_buf; /* all suboffsets are negative */
  1433. base->suboffsets[n] = base->suboffsets[n] + base->strides[dim] * start;
  1434. }
  1435. base->shape[dim] = slicelength;
  1436. base->strides[dim] = base->strides[dim] * step;
  1437. return 0;
  1438. }
  1439. static int
  1440. copy_structure(Py_buffer *base)
  1441. {
  1442. Py_ssize_t *shape = NULL, *strides = NULL, *suboffsets = NULL;
  1443. Py_ssize_t i;
  1444. shape = PyMem_Malloc(base->ndim * (sizeof *shape));
  1445. strides = PyMem_Malloc(base->ndim * (sizeof *strides));
  1446. if (shape == NULL || strides == NULL)
  1447. goto err_nomem;
  1448. suboffsets = NULL;
  1449. if (base->suboffsets) {
  1450. suboffsets = PyMem_Malloc(base->ndim * (sizeof *suboffsets));
  1451. if (suboffsets == NULL)
  1452. goto err_nomem;
  1453. }
  1454. for (i = 0; i < base->ndim; i++) {
  1455. shape[i] = base->shape[i];
  1456. strides[i] = base->strides[i];
  1457. if (suboffsets)
  1458. suboffsets[i] = base->suboffsets[i];
  1459. }
  1460. base->shape = shape;
  1461. base->strides = strides;
  1462. base->suboffsets = suboffsets;
  1463. return 0;
  1464. err_nomem:
  1465. PyErr_NoMemory();
  1466. PyMem_XFree(shape);
  1467. PyMem_XFree(strides);
  1468. PyMem_XFree(suboffsets);
  1469. return -1;
  1470. }
  1471. static PyObject *
  1472. ndarray_subscript(NDArrayObject *self, PyObject *key)
  1473. {
  1474. NDArrayObject *nd;
  1475. ndbuf_t *ndbuf;
  1476. Py_buffer *base = &self->head->base;
  1477. if (base->ndim == 0) {
  1478. if (PyTuple_Check(key) && PyTuple_GET_SIZE(key) == 0) {
  1479. return unpack_single(base->buf, base->format, base->itemsize);
  1480. }
  1481. else if (key == Py_Ellipsis) {
  1482. Py_INCREF(self);
  1483. return (PyObject *)self;
  1484. }
  1485. else {
  1486. PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
  1487. return NULL;
  1488. }
  1489. }
  1490. if (PyIndex_Check(key)) {
  1491. Py_ssize_t index = PyLong_AsSsize_t(key);
  1492. if (index == -1 && PyErr_Occurred())
  1493. return NULL;
  1494. return ndarray_item(self, index);
  1495. }
  1496. nd = (NDArrayObject *)ndarray_new(&NDArray_Type, NULL, NULL);
  1497. if (nd == NULL)
  1498. return NULL;
  1499. /* new ndarray is a consumer */
  1500. if (ndarray_init_staticbuf((PyObject *)self, nd, PyBUF_FULL_RO) < 0) {
  1501. Py_DECREF(nd);
  1502. return NULL;
  1503. }
  1504. /* copy shape, strides and suboffsets */
  1505. ndbuf = nd->head;
  1506. base = &ndbuf->base;
  1507. if (copy_structure(base) < 0) {
  1508. Py_DECREF(nd);
  1509. return NULL;
  1510. }
  1511. ndbuf->flags |= ND_OWN_ARRAYS;
  1512. if (PySlice_Check(key)) {
  1513. /* one-dimensional slice */
  1514. if (init_slice(base, key, 0) < 0)
  1515. goto err_occurred;
  1516. }
  1517. else if (PyTuple_Check(key)) {
  1518. /* multi-dimensional slice */
  1519. PyObject *tuple = key;
  1520. Py_ssize_t i, n;
  1521. n = PyTuple_GET_SIZE(tuple);
  1522. for (i = 0; i < n; i++) {
  1523. key = PyTuple_GET_ITEM(tuple, i);
  1524. if (!PySlice_Check(key))
  1525. goto type_error;
  1526. if (init_slice(base, key, (int)i) < 0)
  1527. goto err_occurred;
  1528. }
  1529. }
  1530. else {
  1531. goto type_error;
  1532. }
  1533. init_len(base);
  1534. init_flags(ndbuf);
  1535. return (PyObject *)nd;
  1536. type_error:
  1537. PyErr_Format(PyExc_TypeError,
  1538. "cannot index memory using \"%.200s\"",
  1539. key->ob_type->tp_name);
  1540. err_occurred:
  1541. Py_DECREF(nd);
  1542. return NULL;
  1543. }
  1544. static int
  1545. ndarray_ass_subscript(NDArrayObject *self, PyObject *key, PyObject *value)
  1546. {
  1547. NDArrayObject *nd;
  1548. Py_buffer *dest = &self->head->base;
  1549. Py_buffer src;
  1550. char *ptr;
  1551. Py_ssize_t index;
  1552. int ret = -1;
  1553. if (dest->readonly) {
  1554. PyErr_SetString(PyExc_TypeError, "ndarray is not writable");
  1555. return -1;
  1556. }
  1557. if (value == NULL) {
  1558. PyErr_SetString(PyExc_TypeError, "ndarray data cannot be deleted");
  1559. return -1;
  1560. }
  1561. if (dest->ndim == 0) {
  1562. if (key == Py_Ellipsis ||
  1563. (PyTuple_Check(key) && PyTuple_GET_SIZE(key) == 0)) {
  1564. ptr = (char *)dest->buf;
  1565. return pack_single(ptr, value, dest->format, dest->itemsize);
  1566. }
  1567. else {
  1568. PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
  1569. return -1;
  1570. }
  1571. }
  1572. if (dest->ndim == 1 && PyIndex_Check(key)) {
  1573. /* rvalue must be a single item */
  1574. index = PyLong_AsSsize_t(key);
  1575. if (index == -1 && PyErr_Occurred())
  1576. return -1;
  1577. else {
  1578. ptr = ptr_from_index(dest, index);
  1579. if (ptr == NULL)
  1580. return -1;
  1581. }
  1582. return pack_single(ptr, value, dest->format, dest->itemsize);
  1583. }
  1584. /* rvalue must be an exporter */
  1585. if (PyObject_GetBuffer(value, &src, PyBUF_FULL_RO) == -1)
  1586. return -1;
  1587. nd = (NDArrayObject *)ndarray_subscript(self, key);
  1588. if (nd != NULL) {
  1589. dest = &nd->head->base;
  1590. ret = copy_buffer(dest, &src);
  1591. Py_DECREF(nd);
  1592. }
  1593. PyBuffer_Release(&src);
  1594. return ret;
  1595. }
static PyObject *
slice_indices(PyObject *self, PyObject *args)
{
    PyObject *ret, *key, *tmp;
    Py_ssize_t s[4]; /* start, stop, step, slicelength */
    Py_ssize_t i, len;

    if (!PyArg_ParseTuple(args, "On", &key, &len)) {
        return NULL;
    }
    if (!PySlice_Check(key)) {
        PyErr_SetString(PyExc_TypeError,
            "first argument must be a slice object");
        return NULL;
    }
    if (PySlice_GetIndicesEx(key, len, &s[0], &s[1], &s[2], &s[3]) < 0) {
        return NULL;
    }

    ret = PyTuple_New(4);
    if (ret == NULL)
        return NULL;

    for (i = 0; i < 4; i++) {
        tmp = PyLong_FromSsize_t(s[i]);
        if (tmp == NULL)
            goto error;
        PyTuple_SET_ITEM(ret, i, tmp);
    }

    return ret;

error:
    Py_DECREF(ret);
    return NULL;
}

static PyMappingMethods ndarray_as_mapping = {
    NULL,                                /* mp_length */
    (binaryfunc)ndarray_subscript,       /* mp_subscript */
    (objobjargproc)ndarray_ass_subscript /* mp_ass_subscript */
};

static PySequenceMethods ndarray_as_sequence = {
    0,                          /* sq_length */
    0,                          /* sq_concat */
    0,                          /* sq_repeat */
    (ssizeargfunc)ndarray_item, /* sq_item */
};


/**************************************************************************/
/*                                getters                                 */
/**************************************************************************/

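/* Convert an array of Py_ssize_t into a Python tuple; a NULL array maps
   to the empty tuple. */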
static PyObject *
ssize_array_as_tuple(Py_ssize_t *array, Py_ssize_t len)
{
    PyObject *tuple, *x;
    Py_ssize_t i;

    if (array == NULL)
        return PyTuple_New(0);

    tuple = PyTuple_New(len);
    if (tuple == NULL)
        return NULL;

    for (i = 0; i < len; i++) {
        x = PyLong_FromSsize_t(array[i]);
        if (x == NULL) {
            Py_DECREF(tuple);
            return NULL;
        }
        PyTuple_SET_ITEM(tuple, i, x);
    }

    return tuple;
}

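/* Getters for the ndbuf bookkeeping fields (flags, offset) and for the
   individual members of the exported Py_buffer. */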
static PyObject *
ndarray_get_flags(NDArrayObject *self, void *closure)
{
    return PyLong_FromLong(self->head->flags);
}

static PyObject *
ndarray_get_offset(NDArrayObject *self, void *closure)
{
    ndbuf_t *ndbuf = self->head;
    return PyLong_FromSsize_t(ndbuf->offset);
}

static PyObject *
ndarray_get_obj(NDArrayObject *self, void *closure)
{
    Py_buffer *base = &self->head->base;

    if (base->obj == NULL) {
        Py_RETURN_NONE;
    }
    Py_INCREF(base->obj);
    return base->obj;
}

static PyObject *
ndarray_get_nbytes(NDArrayObject *self, void *closure)
{
    Py_buffer *base = &self->head->base;
    return PyLong_FromSsize_t(base->len);
}

static PyObject *
ndarray_get_readonly(NDArrayObject *self, void *closure)
{
    Py_buffer *base = &self->head->base;
    return PyLong_FromLong(base->readonly);
}

static PyObject *
ndarray_get_itemsize(NDArrayObject *self, void *closure)
{
    Py_buffer *base = &self->head->base;
    return PyLong_FromSsize_t(base->itemsize);
}

static PyObject *
ndarray_get_format(NDArrayObject *self, void *closure)
{
    Py_buffer *base = &self->head->base;
    char *fmt = base->format ? base->format : "";
    return PyUnicode_FromString(fmt);
}

static PyObject *
ndarray_get_ndim(NDArrayObject *self, void *closure)
{
    Py_buffer *base = &self->head->base;
    return PyLong_FromSsize_t(base->ndim);
}

static PyObject *
ndarray_get_shape(NDArrayObject *self, void *closure)
{
    Py_buffer *base = &self->head->base;
    return ssize_array_as_tuple(base->shape, base->ndim);
}

static PyObject *
ndarray_get_strides(NDArrayObject *self, void *closure)
{
    Py_buffer *base = &self->head->base;
    return ssize_array_as_tuple(base->strides, base->ndim);
}

static PyObject *
ndarray_get_suboffsets(NDArrayObject *self, void *closure)
{
    Py_buffer *base = &self->head->base;
    return ssize_array_as_tuple(base->suboffsets, base->ndim);
}

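/* Contiguity checks: the result of PyBuffer_IsContiguous() must agree
   with the contiguity flags that the exporter itself maintains. */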
static PyObject *
ndarray_c_contig(PyObject *self, PyObject *dummy)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    int ret = PyBuffer_IsContiguous(&nd->head->base, 'C');

    if (ret != ND_C_CONTIGUOUS(nd->head->flags)) {
        PyErr_SetString(PyExc_RuntimeError,
            "results from PyBuffer_IsContiguous() and flags differ");
        return NULL;
    }
    return PyBool_FromLong(ret);
}

static PyObject *
ndarray_fortran_contig(PyObject *self, PyObject *dummy)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    int ret = PyBuffer_IsContiguous(&nd->head->base, 'F');

    if (ret != ND_FORTRAN_CONTIGUOUS(nd->head->flags)) {
        PyErr_SetString(PyExc_RuntimeError,
            "results from PyBuffer_IsContiguous() and flags differ");
        return NULL;
    }
    return PyBool_FromLong(ret);
}

static PyObject *
ndarray_contig(PyObject *self, PyObject *dummy)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    int ret = PyBuffer_IsContiguous(&nd->head->base, 'A');

    if (ret != ND_ANY_CONTIGUOUS(nd->head->flags)) {
        PyErr_SetString(PyExc_RuntimeError,
            "results from PyBuffer_IsContiguous() and flags differ");
        return NULL;
    }
    return PyBool_FromLong(ret);
}

static PyGetSetDef ndarray_getset [] =
{
  /* ndbuf */
  { "flags",        (getter)ndarray_get_flags,      NULL, NULL, NULL},
  { "offset",       (getter)ndarray_get_offset,     NULL, NULL, NULL},
  /* ndbuf.base */
  { "obj",          (getter)ndarray_get_obj,        NULL, NULL, NULL},
  { "nbytes",       (getter)ndarray_get_nbytes,     NULL, NULL, NULL},
  { "readonly",     (getter)ndarray_get_readonly,   NULL, NULL, NULL},
  { "itemsize",     (getter)ndarray_get_itemsize,   NULL, NULL, NULL},
  { "format",       (getter)ndarray_get_format,     NULL, NULL, NULL},
  { "ndim",         (getter)ndarray_get_ndim,       NULL, NULL, NULL},
  { "shape",        (getter)ndarray_get_shape,      NULL, NULL, NULL},
  { "strides",      (getter)ndarray_get_strides,    NULL, NULL, NULL},
  { "suboffsets",   (getter)ndarray_get_suboffsets, NULL, NULL, NULL},
  { "c_contiguous", (getter)ndarray_c_contig,       NULL, NULL, NULL},
  { "f_contiguous", (getter)ndarray_fortran_contig, NULL, NULL, NULL},
  { "contiguous",   (getter)ndarray_contig,         NULL, NULL, NULL},
  {NULL}
};

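/* Conversion helpers: tolist() unpacks every element into a nested list;
   tobytes() returns the logical byte representation of the array, copying
   through a temporary buffer if the export is not C-contiguous. */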
static PyObject *
ndarray_tolist(PyObject *self, PyObject *dummy)
{
    return ndarray_as_list((NDArrayObject *)self);
}

static PyObject *
ndarray_tobytes(PyObject *self, PyObject *dummy)
{
    ndbuf_t *ndbuf = ((NDArrayObject *)self)->head;
    Py_buffer *src = &ndbuf->base;
    Py_buffer dest;
    PyObject *ret = NULL;
    char *mem;

    if (ND_C_CONTIGUOUS(ndbuf->flags))
        return PyBytes_FromStringAndSize(src->buf, src->len);

    assert(src->shape != NULL);
    assert(src->strides != NULL);
    assert(src->ndim > 0);

    mem = PyMem_Malloc(src->len);
    if (mem == NULL) {
        PyErr_NoMemory();
        return NULL;
    }

    dest = *src;
    dest.buf = mem;
    dest.suboffsets = NULL;
    dest.strides = strides_from_shape(ndbuf, 0);
    if (dest.strides == NULL)
        goto out;
    if (copy_buffer(&dest, src) < 0)
        goto out;

    ret = PyBytes_FromStringAndSize(mem, src->len);

out:
    PyMem_XFree(dest.strides);
    PyMem_Free(mem);
    return ret;
}

/* add redundant (negative) suboffsets for testing */
static PyObject *
ndarray_add_suboffsets(PyObject *self, PyObject *dummy)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    Py_buffer *base = &nd->head->base;
    Py_ssize_t i;

    if (base->suboffsets != NULL) {
        PyErr_SetString(PyExc_TypeError,
            "cannot add suboffsets to PIL-style array");
        return NULL;
    }
    if (base->strides == NULL) {
        PyErr_SetString(PyExc_TypeError,
            "cannot add suboffsets to array without strides");
        return NULL;
    }

    base->suboffsets = PyMem_Malloc(base->ndim * (sizeof *base->suboffsets));
    if (base->suboffsets == NULL) {
        PyErr_NoMemory();
        return NULL;
    }

    for (i = 0; i < base->ndim; i++)
        base->suboffsets[i] = -1;

    Py_RETURN_NONE;
}

/* Test PyMemoryView_FromBuffer(): return a memoryview from a static buffer.
   Obviously this is fragile and only one such view may be active at any
   time. Never use anything like this in real code! */
static char *infobuf = NULL;
static PyObject *
ndarray_memoryview_from_buffer(PyObject *self, PyObject *dummy)
{
    const NDArrayObject *nd = (NDArrayObject *)self;
    const Py_buffer *view = &nd->head->base;
    const ndbuf_t *ndbuf;
    static char format[ND_MAX_NDIM+1];
    static Py_ssize_t shape[ND_MAX_NDIM];
    static Py_ssize_t strides[ND_MAX_NDIM];
    static Py_ssize_t suboffsets[ND_MAX_NDIM];
    static Py_buffer info;
    char *p;

    if (!ND_IS_CONSUMER(nd))
        ndbuf = nd->head; /* self is ndarray/original exporter */
    else if (NDArray_Check(view->obj) && !ND_IS_CONSUMER(view->obj))
        /* self is ndarray and consumer from ndarray/original exporter */
        ndbuf = ((NDArrayObject *)view->obj)->head;
    else {
        PyErr_SetString(PyExc_TypeError,
            "memoryview_from_buffer(): ndarray must be original exporter or "
            "consumer from ndarray/original exporter");
        return NULL;
    }

    info = *view;
    p = PyMem_Realloc(infobuf, ndbuf->len);
    if (p == NULL) {
        PyMem_Free(infobuf);
        PyErr_NoMemory();
        infobuf = NULL;
        return NULL;
    }
    else {
        infobuf = p;
    }
    /* copy the complete raw data */
    memcpy(infobuf, ndbuf->data, ndbuf->len);
    info.buf = infobuf + ((char *)view->buf - ndbuf->data);

    if (view->format) {
        if (strlen(view->format) > ND_MAX_NDIM) {
            PyErr_Format(PyExc_TypeError,
                "memoryview_from_buffer: format is limited to %d characters",
                ND_MAX_NDIM);
            return NULL;
        }
        strcpy(format, view->format);
        info.format = format;
    }
    if (view->ndim > ND_MAX_NDIM) {
        PyErr_Format(PyExc_TypeError,
            "memoryview_from_buffer: ndim is limited to %d", ND_MAX_NDIM);
        return NULL;
    }
    if (view->shape) {
        memcpy(shape, view->shape, view->ndim * sizeof(Py_ssize_t));
        info.shape = shape;
    }
    if (view->strides) {
        memcpy(strides, view->strides, view->ndim * sizeof(Py_ssize_t));
        info.strides = strides;
    }
    if (view->suboffsets) {
        memcpy(suboffsets, view->suboffsets, view->ndim * sizeof(Py_ssize_t));
        info.suboffsets = suboffsets;
    }

    return PyMemoryView_FromBuffer(&info);
}

/* Get a single item from bufobj at the location specified by seq.
   seq is a list or tuple of indices. The purpose of this function
   is to check other functions against PyBuffer_GetPointer(). */
static PyObject *
get_pointer(PyObject *self, PyObject *args)
{
    PyObject *ret = NULL, *bufobj, *seq;
    Py_buffer view;
    Py_ssize_t indices[ND_MAX_NDIM];
    Py_ssize_t i;
    void *ptr;

    if (!PyArg_ParseTuple(args, "OO", &bufobj, &seq)) {
        return NULL;
    }

    CHECK_LIST_OR_TUPLE(seq);
    if (PyObject_GetBuffer(bufobj, &view, PyBUF_FULL_RO) < 0)
        return NULL;

    if (view.ndim > ND_MAX_NDIM) {
        PyErr_Format(PyExc_ValueError,
            "get_pointer(): ndim > %d", ND_MAX_NDIM);
        goto out;
    }
    if (PySequence_Fast_GET_SIZE(seq) != view.ndim) {
        PyErr_SetString(PyExc_ValueError,
            "get_pointer(): len(indices) != ndim");
        goto out;
    }

    for (i = 0; i < view.ndim; i++) {
        PyObject *x = PySequence_Fast_GET_ITEM(seq, i);
        indices[i] = PyLong_AsSsize_t(x);
        if (PyErr_Occurred())
            goto out;
        if (indices[i] < 0 || indices[i] >= view.shape[i]) {
            PyErr_Format(PyExc_ValueError,
                "get_pointer(): invalid index %zd at position %zd",
                indices[i], i);
            goto out;
        }
    }

    ptr = PyBuffer_GetPointer(&view, indices);
    ret = unpack_single(ptr, view.format, view.itemsize);

out:
    PyBuffer_Release(&view);
    return ret;
}

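/* Return sizeof(void *) so that tests can compute platform-dependent
   sizes from Python. */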
static PyObject *
get_sizeof_void_p(PyObject *self)
{
    return PyLong_FromSize_t(sizeof(void *));
}

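/* Convert an order argument ('C', 'F' or 'A') into its ASCII character;
   return CHAR_MAX and set an exception on error. */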
static char
get_ascii_order(PyObject *order)
{
    PyObject *ascii_order;
    char ord;

    if (!PyUnicode_Check(order)) {
        PyErr_SetString(PyExc_TypeError,
            "order must be a string");
        return CHAR_MAX;
    }

    ascii_order = PyUnicode_AsASCIIString(order);
    if (ascii_order == NULL) {
        return CHAR_MAX;
    }
    ord = PyBytes_AS_STRING(ascii_order)[0];
    Py_DECREF(ascii_order);

    if (ord != 'C' && ord != 'F' && ord != 'A') {
        PyErr_SetString(PyExc_ValueError,
            "invalid order, must be C, F or A");
        return CHAR_MAX;
    }

    return ord;
}

/* Get a contiguous memoryview. */
static PyObject *
get_contiguous(PyObject *self, PyObject *args)
{
    PyObject *obj;
    PyObject *buffertype;
    PyObject *order;
    long type;
    char ord;

    if (!PyArg_ParseTuple(args, "OOO", &obj, &buffertype, &order)) {
        return NULL;
    }

    if (!PyLong_Check(buffertype)) {
        PyErr_SetString(PyExc_TypeError,
            "buffertype must be PyBUF_READ or PyBUF_WRITE");
        return NULL;
    }

    type = PyLong_AsLong(buffertype);
    if (type == -1 && PyErr_Occurred()) {
        return NULL;
    }
    if (type != PyBUF_READ && type != PyBUF_WRITE) {
        PyErr_SetString(PyExc_ValueError,
            "invalid buffer type");
        return NULL;
    }

    ord = get_ascii_order(order);
    if (ord == CHAR_MAX)
        return NULL;

    return PyMemoryView_GetContiguous(obj, (int)type, ord);
}

/* PyBuffer_ToContiguous() */
static PyObject *
py_buffer_to_contiguous(PyObject *self, PyObject *args)
{
    PyObject *obj;
    PyObject *order;
    PyObject *ret = NULL;
    int flags;
    char ord;
    Py_buffer view;
    char *buf = NULL;

    if (!PyArg_ParseTuple(args, "OOi", &obj, &order, &flags)) {
        return NULL;
    }

    if (PyObject_GetBuffer(obj, &view, flags) < 0) {
        return NULL;
    }

    ord = get_ascii_order(order);
    if (ord == CHAR_MAX) {
        goto out;
    }

    buf = PyMem_Malloc(view.len);
    if (buf == NULL) {
        PyErr_NoMemory();
        goto out;
    }

    if (PyBuffer_ToContiguous(buf, &view, view.len, ord) < 0) {
        goto out;
    }

    ret = PyBytes_FromStringAndSize(buf, view.len);

out:
    PyBuffer_Release(&view);
    PyMem_XFree(buf);
    return ret;
}

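/* Compare two format strings for equality, treating NULL as the implicit
   default format "B". */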
static int
fmtcmp(const char *fmt1, const char *fmt2)
{
    if (fmt1 == NULL) {
        return fmt2 == NULL || strcmp(fmt2, "B") == 0;
    }
    if (fmt2 == NULL) {
        return fmt1 == NULL || strcmp(fmt1, "B") == 0;
    }
    return strcmp(fmt1, fmt2) == 0;
}

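/* Element-wise comparison of two Py_ssize_t arrays, with a special case
   that ignores strides for single-element arrays. */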
static int
arraycmp(const Py_ssize_t *a1, const Py_ssize_t *a2, const Py_ssize_t *shape,
         Py_ssize_t ndim)
{
    Py_ssize_t i;

    if (ndim == 1 && shape && shape[0] == 1) {
        /* This is for comparing strides: For example, the array
           [175], shape=[1], strides=[-5] is considered contiguous. */
        return 1;
    }

    for (i = 0; i < ndim; i++) {
        if (a1[i] != a2[i]) {
            return 0;
        }
    }

    return 1;
}

/* Compare two contiguous buffers for physical equality. */
static PyObject *
cmp_contig(PyObject *self, PyObject *args)
{
    PyObject *b1, *b2; /* buffer objects */
    Py_buffer v1, v2;
    PyObject *ret;
    int equal = 0;

    if (!PyArg_ParseTuple(args, "OO", &b1, &b2)) {
        return NULL;
    }

    if (PyObject_GetBuffer(b1, &v1, PyBUF_FULL_RO) < 0) {
        PyErr_SetString(PyExc_TypeError,
            "cmp_contig: first argument does not implement the buffer "
            "protocol");
        return NULL;
    }
    if (PyObject_GetBuffer(b2, &v2, PyBUF_FULL_RO) < 0) {
        PyErr_SetString(PyExc_TypeError,
            "cmp_contig: second argument does not implement the buffer "
            "protocol");
        PyBuffer_Release(&v1);
        return NULL;
    }

    if (!(PyBuffer_IsContiguous(&v1, 'C')&&PyBuffer_IsContiguous(&v2, 'C')) &&
        !(PyBuffer_IsContiguous(&v1, 'F')&&PyBuffer_IsContiguous(&v2, 'F'))) {
        goto result;
    }

    /* readonly may differ if created from non-contiguous */
    if (v1.len != v2.len ||
        v1.itemsize != v2.itemsize ||
        v1.ndim != v2.ndim ||
        !fmtcmp(v1.format, v2.format) ||
        !!v1.shape != !!v2.shape ||
        !!v1.strides != !!v2.strides ||
        !!v1.suboffsets != !!v2.suboffsets) {
        goto result;
    }

    if ((v1.shape && !arraycmp(v1.shape, v2.shape, NULL, v1.ndim)) ||
        (v1.strides && !arraycmp(v1.strides, v2.strides, v1.shape, v1.ndim)) ||
        (v1.suboffsets && !arraycmp(v1.suboffsets, v2.suboffsets, NULL,
                                    v1.ndim))) {
        goto result;
    }

    if (memcmp((char *)v1.buf, (char *)v2.buf, v1.len) != 0) {
        goto result;
    }

    equal = 1;

result:
    PyBuffer_Release(&v1);
    PyBuffer_Release(&v2);

    ret = equal ? Py_True : Py_False;
    Py_INCREF(ret);
    return ret;
}

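/* is_contiguous(obj, order): expose PyBuffer_IsContiguous() for an
   arbitrary exporter. */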
static PyObject *
is_contiguous(PyObject *self, PyObject *args)
{
    PyObject *obj;
    PyObject *order;
    PyObject *ret = NULL;
    Py_buffer view;
    char ord;

    if (!PyArg_ParseTuple(args, "OO", &obj, &order)) {
        return NULL;
    }

    if (PyObject_GetBuffer(obj, &view, PyBUF_FULL_RO) < 0) {
        PyErr_SetString(PyExc_TypeError,
            "is_contiguous: object does not implement the buffer "
            "protocol");
        return NULL;
    }

    ord = get_ascii_order(order);
    if (ord == CHAR_MAX) {
        goto release;
    }

    ret = PyBuffer_IsContiguous(&view, ord) ? Py_True : Py_False;
    Py_INCREF(ret);

release:
    PyBuffer_Release(&view);
    return ret;
}

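/* Hash support: only read-only exporters whose base object is hashable
   are hashable; the hash is that of the bytes representation. */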
static Py_hash_t
ndarray_hash(PyObject *self)
{
    const NDArrayObject *nd = (NDArrayObject *)self;
    const Py_buffer *view = &nd->head->base;
    PyObject *bytes;
    Py_hash_t hash;

    if (!view->readonly) {
        PyErr_SetString(PyExc_ValueError,
            "cannot hash writable ndarray object");
        return -1;
    }
    if (view->obj != NULL && PyObject_Hash(view->obj) == -1) {
        return -1;
    }

    bytes = ndarray_tobytes(self, NULL);
    if (bytes == NULL) {
        return -1;
    }

    hash = PyObject_Hash(bytes);
    Py_DECREF(bytes);
    return hash;
}

static PyMethodDef ndarray_methods [] =
{
    { "tolist", ndarray_tolist, METH_NOARGS, NULL },
    { "tobytes", ndarray_tobytes, METH_NOARGS, NULL },
    { "push", (PyCFunction)ndarray_push, METH_VARARGS|METH_KEYWORDS, NULL },
    { "pop", ndarray_pop, METH_NOARGS, NULL },
    { "add_suboffsets", ndarray_add_suboffsets, METH_NOARGS, NULL },
    { "memoryview_from_buffer", ndarray_memoryview_from_buffer, METH_NOARGS, NULL },
    {NULL}
};

static PyTypeObject NDArray_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "ndarray",                   /* Name of this type */
    sizeof(NDArrayObject),       /* Basic object size */
    0,                           /* Item size for varobject */
    (destructor)ndarray_dealloc, /* tp_dealloc */
    0,                           /* tp_print */
    0,                           /* tp_getattr */
    0,                           /* tp_setattr */
    0,                           /* tp_compare */
    0,                           /* tp_repr */
    0,                           /* tp_as_number */
    &ndarray_as_sequence,        /* tp_as_sequence */
    &ndarray_as_mapping,         /* tp_as_mapping */
    (hashfunc)ndarray_hash,      /* tp_hash */
    0,                           /* tp_call */
    0,                           /* tp_str */
    PyObject_GenericGetAttr,     /* tp_getattro */
    0,                           /* tp_setattro */
    &ndarray_as_buffer,          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,          /* tp_flags */
    0,                           /* tp_doc */
    0,                           /* tp_traverse */
    0,                           /* tp_clear */
    0,                           /* tp_richcompare */
    0,                           /* tp_weaklistoffset */
    0,                           /* tp_iter */
    0,                           /* tp_iternext */
    ndarray_methods,             /* tp_methods */
    0,                           /* tp_members */
    ndarray_getset,              /* tp_getset */
    0,                           /* tp_base */
    0,                           /* tp_dict */
    0,                           /* tp_descr_get */
    0,                           /* tp_descr_set */
    0,                           /* tp_dictoffset */
    ndarray_init,                /* tp_init */
    0,                           /* tp_alloc */
    ndarray_new,                 /* tp_new */
};

/**************************************************************************/
/*                           StaticArray Object                           */
/**************************************************************************/

static PyTypeObject StaticArray_Type;

typedef struct {
    PyObject_HEAD
    int legacy_mode; /* if true, use the view.obj==NULL hack */
} StaticArrayObject;

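/* A fixed 12-byte data area shared by all staticarray instances and
   exported as a one-dimensional, read-only buffer of unsigned bytes. */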
static char static_mem[12] = {0,1,2,3,4,5,6,7,8,9,10,11};
static Py_ssize_t static_shape[1] = {12};
static Py_ssize_t static_strides[1] = {1};
static Py_buffer static_buffer = {
    static_mem,     /* buf */
    NULL,           /* obj */
    12,             /* len */
    1,              /* itemsize */
    1,              /* readonly */
    1,              /* ndim */
    "B",            /* format */
    static_shape,   /* shape */
    static_strides, /* strides */
    NULL,           /* suboffsets */
    NULL            /* internal */
};

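/* staticarray(legacy_mode=False): every instance exports the static
   buffer defined above; in legacy mode view.obj is left as NULL. */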
static PyObject *
staticarray_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    return (PyObject *)PyObject_New(StaticArrayObject, &StaticArray_Type);
}

static int
staticarray_init(PyObject *self, PyObject *args, PyObject *kwds)
{
    StaticArrayObject *a = (StaticArrayObject *)self;
    static char *kwlist[] = {
        "legacy_mode", NULL
    };
    PyObject *legacy_mode = Py_False;

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "|O", kwlist, &legacy_mode))
        return -1;

    a->legacy_mode = (legacy_mode != Py_False);
    return 0;
}

static void
staticarray_dealloc(StaticArrayObject *self)
{
    PyObject_Del(self);
}

/* Return a buffer for a PyBUF_FULL_RO request. Flags are not checked,
   which makes this object a non-compliant exporter! */
static int
staticarray_getbuf(StaticArrayObject *self, Py_buffer *view, int flags)
{
    *view = static_buffer;

    if (self->legacy_mode) {
        view->obj = NULL; /* Don't use this in new code. */
    }
    else {
        view->obj = (PyObject *)self;
        Py_INCREF(view->obj);
    }

    return 0;
}

static PyBufferProcs staticarray_as_buffer = {
    (getbufferproc)staticarray_getbuf, /* bf_getbuffer */
    NULL,                              /* bf_releasebuffer */
};

static PyTypeObject StaticArray_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "staticarray",                   /* Name of this type */
    sizeof(StaticArrayObject),       /* Basic object size */
    0,                               /* Item size for varobject */
    (destructor)staticarray_dealloc, /* tp_dealloc */
    0,                               /* tp_print */
    0,                               /* tp_getattr */
    0,                               /* tp_setattr */
    0,                               /* tp_compare */
    0,                               /* tp_repr */
    0,                               /* tp_as_number */
    0,                               /* tp_as_sequence */
    0,                               /* tp_as_mapping */
    0,                               /* tp_hash */
    0,                               /* tp_call */
    0,                               /* tp_str */
    0,                               /* tp_getattro */
    0,                               /* tp_setattro */
    &staticarray_as_buffer,          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,              /* tp_flags */
    0,                               /* tp_doc */
    0,                               /* tp_traverse */
    0,                               /* tp_clear */
    0,                               /* tp_richcompare */
    0,                               /* tp_weaklistoffset */
    0,                               /* tp_iter */
    0,                               /* tp_iternext */
    0,                               /* tp_methods */
    0,                               /* tp_members */
    0,                               /* tp_getset */
    0,                               /* tp_base */
    0,                               /* tp_dict */
    0,                               /* tp_descr_get */
    0,                               /* tp_descr_set */
    0,                               /* tp_dictoffset */
    staticarray_init,                /* tp_init */
    0,                               /* tp_alloc */
    staticarray_new,                 /* tp_new */
};

static struct PyMethodDef _testbuffer_functions[] = {
    {"slice_indices", slice_indices, METH_VARARGS, NULL},
    {"get_pointer", get_pointer, METH_VARARGS, NULL},
    {"get_sizeof_void_p", (PyCFunction)get_sizeof_void_p, METH_NOARGS, NULL},
    {"get_contiguous", get_contiguous, METH_VARARGS, NULL},
    {"py_buffer_to_contiguous", py_buffer_to_contiguous, METH_VARARGS, NULL},
    {"is_contiguous", is_contiguous, METH_VARARGS, NULL},
    {"cmp_contig", cmp_contig, METH_VARARGS, NULL},
    {NULL, NULL}
};

static struct PyModuleDef _testbuffermodule = {
    PyModuleDef_HEAD_INIT,
    "_testbuffer",
    NULL,
    -1,
    _testbuffer_functions,
    NULL,
    NULL,
    NULL,
    NULL
};

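/* Module initialization: register the ndarray and staticarray types,
   fetch struct.Struct and struct.calcsize for format handling, and expose
   the ND_* and PyBUF_* flag constants to Python. */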
PyMODINIT_FUNC
PyInit__testbuffer(void)
{
    PyObject *m;

    m = PyModule_Create(&_testbuffermodule);
    if (m == NULL)
        return NULL;

    Py_TYPE(&NDArray_Type) = &PyType_Type;
    Py_INCREF(&NDArray_Type);
    PyModule_AddObject(m, "ndarray", (PyObject *)&NDArray_Type);

    Py_TYPE(&StaticArray_Type) = &PyType_Type;
    Py_INCREF(&StaticArray_Type);
    PyModule_AddObject(m, "staticarray", (PyObject *)&StaticArray_Type);

    structmodule = PyImport_ImportModule("struct");
    if (structmodule == NULL)
        return NULL;

    Struct = PyObject_GetAttrString(structmodule, "Struct");
    calcsize = PyObject_GetAttrString(structmodule, "calcsize");
    if (Struct == NULL || calcsize == NULL)
        return NULL;

    simple_format = PyUnicode_FromString(simple_fmt);
    if (simple_format == NULL)
        return NULL;

    PyModule_AddIntMacro(m, ND_MAX_NDIM);
    PyModule_AddIntMacro(m, ND_VAREXPORT);
    PyModule_AddIntMacro(m, ND_WRITABLE);
    PyModule_AddIntMacro(m, ND_FORTRAN);
    PyModule_AddIntMacro(m, ND_SCALAR);
    PyModule_AddIntMacro(m, ND_PIL);
    PyModule_AddIntMacro(m, ND_GETBUF_FAIL);
    PyModule_AddIntMacro(m, ND_GETBUF_UNDEFINED);
    PyModule_AddIntMacro(m, ND_REDIRECT);

    PyModule_AddIntMacro(m, PyBUF_SIMPLE);
    PyModule_AddIntMacro(m, PyBUF_WRITABLE);
    PyModule_AddIntMacro(m, PyBUF_FORMAT);
    PyModule_AddIntMacro(m, PyBUF_ND);
    PyModule_AddIntMacro(m, PyBUF_STRIDES);
    PyModule_AddIntMacro(m, PyBUF_INDIRECT);
    PyModule_AddIntMacro(m, PyBUF_C_CONTIGUOUS);
    PyModule_AddIntMacro(m, PyBUF_F_CONTIGUOUS);
    PyModule_AddIntMacro(m, PyBUF_ANY_CONTIGUOUS);
    PyModule_AddIntMacro(m, PyBUF_FULL);
    PyModule_AddIntMacro(m, PyBUF_FULL_RO);
    PyModule_AddIntMacro(m, PyBUF_RECORDS);
    PyModule_AddIntMacro(m, PyBUF_RECORDS_RO);
    PyModule_AddIntMacro(m, PyBUF_STRIDED);
    PyModule_AddIntMacro(m, PyBUF_STRIDED_RO);
    PyModule_AddIntMacro(m, PyBUF_CONTIG);
    PyModule_AddIntMacro(m, PyBUF_CONTIG_RO);

    PyModule_AddIntMacro(m, PyBUF_READ);
    PyModule_AddIntMacro(m, PyBUF_WRITE);

    return m;
}