/* C Extension module to test all aspects of PEP-3118.
   Written by Stefan Krah. */

#define PY_SSIZE_T_CLEAN
#include "Python.h"

/* struct module */
PyObject *structmodule = NULL;
PyObject *Struct = NULL;
PyObject *calcsize = NULL;

/* cache simple format string */
static const char *simple_fmt = "B";
PyObject *simple_format = NULL;
#define SIMPLE_FORMAT(fmt) (fmt == NULL || strcmp(fmt, "B") == 0)
#define FIX_FORMAT(fmt) (fmt == NULL ? "B" : fmt)


/**************************************************************************/
/*                             NDArray Object                             */
/**************************************************************************/

static PyTypeObject NDArray_Type;
#define NDArray_Check(v) (Py_TYPE(v) == &NDArray_Type)

#define CHECK_LIST_OR_TUPLE(v) \
    if (!PyList_Check(v) && !PyTuple_Check(v)) { \
        PyErr_SetString(PyExc_TypeError,         \
            #v " must be a list or a tuple");    \
        return NULL;                             \
    }                                            \

#define PyMem_XFree(v) \
    do { if (v) PyMem_Free(v); } while (0)

/* Maximum number of dimensions. */
#define ND_MAX_NDIM (2 * PyBUF_MAX_NDIM)

/* Check for the presence of suboffsets in the first dimension. */
#define HAVE_PTR(suboffsets) (suboffsets && suboffsets[0] >= 0)
/* Adjust ptr if suboffsets are present. */
#define ADJUST_PTR(ptr, suboffsets) \
    (HAVE_PTR(suboffsets) ? *((char**)ptr) + suboffsets[0] : ptr)
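
/* For illustration: with a PIL-style first dimension and suboffsets[0] == 2,
   'ptr' addresses a slot in an array of char * and ADJUST_PTR(ptr, suboffsets)
   evaluates to *(char **)ptr + 2. With suboffsets == NULL (NumPy style) or
   suboffsets[0] < 0, HAVE_PTR() is false and ptr is returned unchanged. */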

/* Default: NumPy style (strides), read-only, no var-export, C-style layout */
#define ND_DEFAULT          0x000
/* User configurable flags for the ndarray */
#define ND_VAREXPORT        0x001   /* change layout while buffers are exported */
/* User configurable flags for each base buffer */
#define ND_WRITABLE         0x002   /* mark base buffer as writable */
#define ND_FORTRAN          0x004   /* Fortran contiguous layout */
#define ND_SCALAR           0x008   /* scalar: ndim = 0 */
#define ND_PIL              0x010   /* convert to PIL-style array (suboffsets) */
#define ND_REDIRECT         0x020   /* redirect buffer requests */
#define ND_GETBUF_FAIL      0x040   /* trigger getbuffer failure */
#define ND_GETBUF_UNDEFINED 0x080   /* undefined view.obj */
/* Internal flags for the base buffer */
#define ND_C                0x100   /* C contiguous layout (default) */
#define ND_OWN_ARRAYS       0x200   /* consumer owns arrays */

/* ndarray properties */
#define ND_IS_CONSUMER(nd) \
    (((NDArrayObject *)nd)->head == &((NDArrayObject *)nd)->staticbuf)

/* ndbuf->flags properties */
#define ND_C_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_C)))
#define ND_FORTRAN_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_FORTRAN)))
#define ND_ANY_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_C|ND_FORTRAN)))

/* getbuffer() requests */
#define REQ_INDIRECT(flags) ((flags&PyBUF_INDIRECT) == PyBUF_INDIRECT)
#define REQ_C_CONTIGUOUS(flags) ((flags&PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS)
#define REQ_F_CONTIGUOUS(flags) ((flags&PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS)
#define REQ_ANY_CONTIGUOUS(flags) ((flags&PyBUF_ANY_CONTIGUOUS) == PyBUF_ANY_CONTIGUOUS)
#define REQ_STRIDES(flags) ((flags&PyBUF_STRIDES) == PyBUF_STRIDES)
#define REQ_SHAPE(flags) ((flags&PyBUF_ND) == PyBUF_ND)
#define REQ_WRITABLE(flags) (flags&PyBUF_WRITABLE)
#define REQ_FORMAT(flags) (flags&PyBUF_FORMAT)


/* Single node of a list of base buffers. The list is needed to implement
   changes in memory layout while exported buffers are active. */
static PyTypeObject NDArray_Type;

struct ndbuf;
typedef struct ndbuf {
    struct ndbuf *next;
    struct ndbuf *prev;
    Py_ssize_t len;     /* length of data */
    Py_ssize_t offset;  /* start of the array relative to data */
    char *data;         /* raw data */
    int flags;          /* capabilities of the base buffer */
    Py_ssize_t exports; /* number of exports */
    Py_buffer base;     /* base buffer */
} ndbuf_t;

typedef struct {
    PyObject_HEAD
    int flags;          /* ndarray flags */
    ndbuf_t staticbuf;  /* static buffer for re-exporting mode */
    ndbuf_t *head;      /* currently active base buffer */
} NDArrayObject;
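
/* An NDArrayObject is used in one of two modes: as a consumer it re-exports
   exactly one buffer obtained from another exporter, stored in 'staticbuf'
   (see ND_IS_CONSUMER); as an original exporter it owns a linked list of
   base buffers, with 'head' pointing at the most recently pushed one. */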

static ndbuf_t *
ndbuf_new(Py_ssize_t nitems, Py_ssize_t itemsize, Py_ssize_t offset, int flags)
{
    ndbuf_t *ndbuf;
    Py_buffer *base;
    Py_ssize_t len;

    len = nitems * itemsize;
    if (offset % itemsize) {
        PyErr_SetString(PyExc_ValueError,
            "offset must be a multiple of itemsize");
        return NULL;
    }
    if (offset < 0 || offset+itemsize > len) {
        PyErr_SetString(PyExc_ValueError, "offset out of bounds");
        return NULL;
    }

    ndbuf = PyMem_Malloc(sizeof *ndbuf);
    if (ndbuf == NULL) {
        PyErr_NoMemory();
        return NULL;
    }

    ndbuf->next = NULL;
    ndbuf->prev = NULL;
    ndbuf->len = len;
    ndbuf->offset = offset;

    ndbuf->data = PyMem_Malloc(len);
    if (ndbuf->data == NULL) {
        PyErr_NoMemory();
        PyMem_Free(ndbuf);
        return NULL;
    }

    ndbuf->flags = flags;
    ndbuf->exports = 0;

    base = &ndbuf->base;
    base->obj = NULL;
    base->buf = ndbuf->data;
    base->len = len;
    base->itemsize = 1;
    base->readonly = 0;
    base->format = NULL;
    base->ndim = 1;
    base->shape = NULL;
    base->strides = NULL;
    base->suboffsets = NULL;
    base->internal = ndbuf;

    return ndbuf;
}

static void
ndbuf_free(ndbuf_t *ndbuf)
{
    Py_buffer *base = &ndbuf->base;

    PyMem_XFree(ndbuf->data);
    PyMem_XFree(base->format);
    PyMem_XFree(base->shape);
    PyMem_XFree(base->strides);
    PyMem_XFree(base->suboffsets);

    PyMem_Free(ndbuf);
}

static void
ndbuf_push(NDArrayObject *nd, ndbuf_t *elt)
{
    elt->next = nd->head;
    if (nd->head) nd->head->prev = elt;
    nd->head = elt;
    elt->prev = NULL;
}

static void
ndbuf_delete(NDArrayObject *nd, ndbuf_t *elt)
{
    if (elt->prev)
        elt->prev->next = elt->next;
    else
        nd->head = elt->next;

    if (elt->next)
        elt->next->prev = elt->prev;

    ndbuf_free(elt);
}

static void
ndbuf_pop(NDArrayObject *nd)
{
    ndbuf_delete(nd, nd->head);
}


static PyObject *
ndarray_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    NDArrayObject *nd;

    nd = PyObject_New(NDArrayObject, &NDArray_Type);
    if (nd == NULL)
        return NULL;

    nd->flags = 0;
    nd->head = NULL;
    return (PyObject *)nd;
}

static void
ndarray_dealloc(NDArrayObject *self)
{
    if (self->head) {
        if (ND_IS_CONSUMER(self)) {
            Py_buffer *base = &self->head->base;
            if (self->head->flags & ND_OWN_ARRAYS) {
                PyMem_XFree(base->shape);
                PyMem_XFree(base->strides);
                PyMem_XFree(base->suboffsets);
            }
            PyBuffer_Release(base);
        }
        else {
            while (self->head)
                ndbuf_pop(self);
        }
    }
    PyObject_Del(self);
}

static int
ndarray_init_staticbuf(PyObject *exporter, NDArrayObject *nd, int flags)
{
    Py_buffer *base = &nd->staticbuf.base;

    if (PyObject_GetBuffer(exporter, base, flags) < 0)
        return -1;

    nd->head = &nd->staticbuf;

    nd->head->next = NULL;
    nd->head->prev = NULL;
    nd->head->len = -1;
    nd->head->offset = -1;
    nd->head->data = NULL;

    nd->head->flags = base->readonly ? 0 : ND_WRITABLE;
    nd->head->exports = 0;

    return 0;
}

static void
init_flags(ndbuf_t *ndbuf)
{
    if (ndbuf->base.ndim == 0)
        ndbuf->flags |= ND_SCALAR;
    if (ndbuf->base.suboffsets)
        ndbuf->flags |= ND_PIL;
    if (PyBuffer_IsContiguous(&ndbuf->base, 'C'))
        ndbuf->flags |= ND_C;
    if (PyBuffer_IsContiguous(&ndbuf->base, 'F'))
        ndbuf->flags |= ND_FORTRAN;
}


/****************************************************************************/
/*                          Buffer/List conversions                         */
/****************************************************************************/

static Py_ssize_t *strides_from_shape(const ndbuf_t *, int flags);

/* Get number of members in a struct: see issue #12740 */
typedef struct {
    PyObject_HEAD
    Py_ssize_t s_size;
    Py_ssize_t s_len;
} PyPartialStructObject;

static Py_ssize_t
get_nmemb(PyObject *s)
{
    return ((PyPartialStructObject *)s)->s_len;
}

/* Pack all items into the buffer of 'obj'. The 'format' parameter must be
   in struct module syntax. For standard C types, a single item is an integer.
   For compound types, a single item is a tuple of integers. */
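
/* For example (illustrative values): with format "B" (nmemb == 1) each item
   may be a plain integer such as 7; with a compound format such as "HH"
   (nmemb == 2) each item must be a list or tuple of length 2, e.g. (1, 2),
   which is forwarded to struct.pack_into(obj, offset, 1, 2). */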

static int
pack_from_list(PyObject *obj, PyObject *items, PyObject *format,
               Py_ssize_t itemsize)
{
    PyObject *structobj, *pack_into;
    PyObject *args, *offset;
    PyObject *item, *tmp;
    Py_ssize_t nitems; /* number of items */
    Py_ssize_t nmemb;  /* number of members in a single item */
    Py_ssize_t i, j;
    int ret = 0;

    assert(PyObject_CheckBuffer(obj));
    assert(PyList_Check(items) || PyTuple_Check(items));

    structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
    if (structobj == NULL)
        return -1;

    nitems = PySequence_Fast_GET_SIZE(items);
    nmemb = get_nmemb(structobj);
    assert(nmemb >= 1);

    pack_into = PyObject_GetAttrString(structobj, "pack_into");
    if (pack_into == NULL) {
        Py_DECREF(structobj);
        return -1;
    }

    /* nmemb >= 1 */
    args = PyTuple_New(2 + nmemb);
    if (args == NULL) {
        Py_DECREF(pack_into);
        Py_DECREF(structobj);
        return -1;
    }

    offset = NULL;
    for (i = 0; i < nitems; i++) {
        /* Loop invariant: args[j] are borrowed references or NULL. */
        PyTuple_SET_ITEM(args, 0, obj);
        for (j = 1; j < 2+nmemb; j++)
            PyTuple_SET_ITEM(args, j, NULL);

        Py_XDECREF(offset);
        offset = PyLong_FromSsize_t(i*itemsize);
        if (offset == NULL) {
            ret = -1;
            break;
        }
        PyTuple_SET_ITEM(args, 1, offset);

        item = PySequence_Fast_GET_ITEM(items, i);
        if ((PyBytes_Check(item) || PyLong_Check(item) ||
             PyFloat_Check(item)) && nmemb == 1) {
            PyTuple_SET_ITEM(args, 2, item);
        }
        else if ((PyList_Check(item) || PyTuple_Check(item)) &&
                 PySequence_Length(item) == nmemb) {
            for (j = 0; j < nmemb; j++) {
                tmp = PySequence_Fast_GET_ITEM(item, j);
                PyTuple_SET_ITEM(args, 2+j, tmp);
            }
        }
        else {
            PyErr_SetString(PyExc_ValueError,
                "mismatch between initializer element and format string");
            ret = -1;
            break;
        }

        tmp = PyObject_CallObject(pack_into, args);
        if (tmp == NULL) {
            ret = -1;
            break;
        }
        Py_DECREF(tmp);
    }

    Py_INCREF(obj); /* args[0] */
    /* args[1]: offset is either NULL or should be dealloc'd */
    for (i = 2; i < 2+nmemb; i++) {
        tmp = PyTuple_GET_ITEM(args, i);
        Py_XINCREF(tmp);
    }
    Py_DECREF(args);

    Py_DECREF(pack_into);
    Py_DECREF(structobj);
    return ret;
}

/* Pack single element */
static int
pack_single(char *ptr, PyObject *item, const char *fmt, Py_ssize_t itemsize)
{
    PyObject *structobj = NULL, *pack_into = NULL, *args = NULL;
    PyObject *format = NULL, *mview = NULL, *zero = NULL;
    Py_ssize_t i, nmemb;
    int ret = -1;
    PyObject *x;

    if (fmt == NULL) fmt = "B";

    format = PyUnicode_FromString(fmt);
    if (format == NULL)
        goto out;

    structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
    if (structobj == NULL)
        goto out;

    nmemb = get_nmemb(structobj);
    assert(nmemb >= 1);

    mview = PyMemoryView_FromMemory(ptr, itemsize, PyBUF_WRITE);
    if (mview == NULL)
        goto out;

    zero = PyLong_FromLong(0);
    if (zero == NULL)
        goto out;

    pack_into = PyObject_GetAttrString(structobj, "pack_into");
    if (pack_into == NULL)
        goto out;

    args = PyTuple_New(2+nmemb);
    if (args == NULL)
        goto out;

    PyTuple_SET_ITEM(args, 0, mview);
    PyTuple_SET_ITEM(args, 1, zero);

    if ((PyBytes_Check(item) || PyLong_Check(item) ||
         PyFloat_Check(item)) && nmemb == 1) {
        PyTuple_SET_ITEM(args, 2, item);
    }
    else if ((PyList_Check(item) || PyTuple_Check(item)) &&
             PySequence_Length(item) == nmemb) {
        for (i = 0; i < nmemb; i++) {
            x = PySequence_Fast_GET_ITEM(item, i);
            PyTuple_SET_ITEM(args, 2+i, x);
        }
    }
    else {
        PyErr_SetString(PyExc_ValueError,
            "mismatch between initializer element and format string");
        goto args_out;
    }

    x = PyObject_CallObject(pack_into, args);
    if (x != NULL) {
        Py_DECREF(x);
        ret = 0;
    }

args_out:
    for (i = 0; i < 2+nmemb; i++)
        Py_XINCREF(PyTuple_GET_ITEM(args, i));
    Py_XDECREF(args);
out:
    Py_XDECREF(pack_into);
    Py_XDECREF(zero);
    Py_XDECREF(mview);
    Py_XDECREF(structobj);
    Py_XDECREF(format);
    return ret;
}

static void
copy_rec(const Py_ssize_t *shape, Py_ssize_t ndim, Py_ssize_t itemsize,
         char *dptr, const Py_ssize_t *dstrides, const Py_ssize_t *dsuboffsets,
         char *sptr, const Py_ssize_t *sstrides, const Py_ssize_t *ssuboffsets,
         char *mem)
{
    Py_ssize_t i;

    assert(ndim >= 1);

    if (ndim == 1) {
        if (!HAVE_PTR(dsuboffsets) && !HAVE_PTR(ssuboffsets) &&
            dstrides[0] == itemsize && sstrides[0] == itemsize) {
            memmove(dptr, sptr, shape[0] * itemsize);
        }
        else {
            char *p;
            assert(mem != NULL);
            for (i=0, p=mem; i<shape[0]; p+=itemsize, sptr+=sstrides[0], i++) {
                char *xsptr = ADJUST_PTR(sptr, ssuboffsets);
                memcpy(p, xsptr, itemsize);
            }
            for (i=0, p=mem; i<shape[0]; p+=itemsize, dptr+=dstrides[0], i++) {
                char *xdptr = ADJUST_PTR(dptr, dsuboffsets);
                memcpy(xdptr, p, itemsize);
            }
        }
        return;
    }

    for (i = 0; i < shape[0]; dptr+=dstrides[0], sptr+=sstrides[0], i++) {
        char *xdptr = ADJUST_PTR(dptr, dsuboffsets);
        char *xsptr = ADJUST_PTR(sptr, ssuboffsets);

        copy_rec(shape+1, ndim-1, itemsize,
                 xdptr, dstrides+1, dsuboffsets ? dsuboffsets+1 : NULL,
                 xsptr, sstrides+1, ssuboffsets ? ssuboffsets+1 : NULL,
                 mem);
    }
}

static int
cmp_structure(Py_buffer *dest, Py_buffer *src)
{
    Py_ssize_t i;

    if (strcmp(FIX_FORMAT(dest->format), FIX_FORMAT(src->format)) != 0 ||
        dest->itemsize != src->itemsize ||
        dest->ndim != src->ndim)
        return -1;

    for (i = 0; i < dest->ndim; i++) {
        if (dest->shape[i] != src->shape[i])
            return -1;
        if (dest->shape[i] == 0)
            break;
    }

    return 0;
}

/* Copy src to dest. Both buffers must have the same format, itemsize,
   ndim and shape. Copying is atomic, the function never fails with
   a partial copy. */
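
/* Note: copy_rec() copies the innermost dimension through the scratch buffer
   'mem' whenever either side uses suboffsets in the last dimension or has a
   last-dimension stride different from itemsize; the source row is staged in
   'mem' first and only then written to the destination, which keeps the copy
   correct even for overlapping memory. copy_buffer() below allocates 'mem'
   for exactly those cases. */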

static int
copy_buffer(Py_buffer *dest, Py_buffer *src)
{
    char *mem = NULL;

    assert(dest->ndim > 0);

    if (cmp_structure(dest, src) < 0) {
        PyErr_SetString(PyExc_ValueError,
            "ndarray assignment: lvalue and rvalue have different structures");
        return -1;
    }

    if ((dest->suboffsets && dest->suboffsets[dest->ndim-1] >= 0) ||
        (src->suboffsets && src->suboffsets[src->ndim-1] >= 0) ||
        dest->strides[dest->ndim-1] != dest->itemsize ||
        src->strides[src->ndim-1] != src->itemsize) {
        mem = PyMem_Malloc(dest->shape[dest->ndim-1] * dest->itemsize);
        if (mem == NULL) {
            PyErr_NoMemory();
            return -1;
        }
    }

    copy_rec(dest->shape, dest->ndim, dest->itemsize,
             dest->buf, dest->strides, dest->suboffsets,
             src->buf, src->strides, src->suboffsets,
             mem);

    PyMem_XFree(mem);
    return 0;
}

/* Unpack single element */
static PyObject *
unpack_single(char *ptr, const char *fmt, Py_ssize_t itemsize)
{
    PyObject *x, *unpack_from, *mview;

    if (fmt == NULL) {
        fmt = "B";
        itemsize = 1;
    }

    unpack_from = PyObject_GetAttrString(structmodule, "unpack_from");
    if (unpack_from == NULL)
        return NULL;

    mview = PyMemoryView_FromMemory(ptr, itemsize, PyBUF_READ);
    if (mview == NULL) {
        Py_DECREF(unpack_from);
        return NULL;
    }

    x = PyObject_CallFunction(unpack_from, "sO", fmt, mview);
    Py_DECREF(unpack_from);
    Py_DECREF(mview);
    if (x == NULL)
        return NULL;

    if (PyTuple_GET_SIZE(x) == 1) {
        PyObject *tmp = PyTuple_GET_ITEM(x, 0);
        Py_INCREF(tmp);
        Py_DECREF(x);
        return tmp;
    }

    return x;
}

/* Unpack a multi-dimensional matrix into a nested list. Return a scalar
   for ndim = 0. */
static PyObject *
unpack_rec(PyObject *unpack_from, char *ptr, PyObject *mview, char *item,
           const Py_ssize_t *shape, const Py_ssize_t *strides,
           const Py_ssize_t *suboffsets, Py_ssize_t ndim, Py_ssize_t itemsize)
{
    PyObject *lst, *x;
    Py_ssize_t i;

    assert(ndim >= 0);
    assert(shape != NULL);
    assert(strides != NULL);

    if (ndim == 0) {
        memcpy(item, ptr, itemsize);
        x = PyObject_CallFunctionObjArgs(unpack_from, mview, NULL);
        if (x == NULL)
            return NULL;
        if (PyTuple_GET_SIZE(x) == 1) {
            PyObject *tmp = PyTuple_GET_ITEM(x, 0);
            Py_INCREF(tmp);
            Py_DECREF(x);
            return tmp;
        }
        return x;
    }

    lst = PyList_New(shape[0]);
    if (lst == NULL)
        return NULL;

    for (i = 0; i < shape[0]; ptr+=strides[0], i++) {
        char *nextptr = ADJUST_PTR(ptr, suboffsets);

        x = unpack_rec(unpack_from, nextptr, mview, item,
                       shape+1, strides+1, suboffsets ? suboffsets+1 : NULL,
                       ndim-1, itemsize);
        if (x == NULL) {
            Py_DECREF(lst);
            return NULL;
        }

        PyList_SET_ITEM(lst, i, x);
    }

    return lst;
}


static PyObject *
ndarray_as_list(NDArrayObject *nd)
{
    PyObject *structobj = NULL, *unpack_from = NULL;
    PyObject *lst = NULL, *mview = NULL;
    Py_buffer *base = &nd->head->base;
    Py_ssize_t *shape = base->shape;
    Py_ssize_t *strides = base->strides;
    Py_ssize_t simple_shape[1];
    Py_ssize_t simple_strides[1];
    char *item = NULL;
    PyObject *format;
    char *fmt = base->format;

    base = &nd->head->base;

    if (fmt == NULL) {
        PyErr_SetString(PyExc_ValueError,
            "ndarray: tolist() does not support format=NULL, use "
            "tobytes()");
        return NULL;
    }
    if (shape == NULL) {
        assert(ND_C_CONTIGUOUS(nd->head->flags));
        assert(base->strides == NULL);
        assert(base->ndim <= 1);
        shape = simple_shape;
        shape[0] = base->len;
        strides = simple_strides;
        strides[0] = base->itemsize;
    }
    else if (strides == NULL) {
        assert(ND_C_CONTIGUOUS(nd->head->flags));
        strides = strides_from_shape(nd->head, 0);
        if (strides == NULL)
            return NULL;
    }

    format = PyUnicode_FromString(fmt);
    if (format == NULL)
        goto out;

    structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
    Py_DECREF(format);
    if (structobj == NULL)
        goto out;

    unpack_from = PyObject_GetAttrString(structobj, "unpack_from");
    if (unpack_from == NULL)
        goto out;

    item = PyMem_Malloc(base->itemsize);
    if (item == NULL) {
        PyErr_NoMemory();
        goto out;
    }

    mview = PyMemoryView_FromMemory(item, base->itemsize, PyBUF_WRITE);
    if (mview == NULL)
        goto out;

    lst = unpack_rec(unpack_from, base->buf, mview, item,
                     shape, strides, base->suboffsets,
                     base->ndim, base->itemsize);

out:
    Py_XDECREF(mview);
    PyMem_XFree(item);
    Py_XDECREF(unpack_from);
    Py_XDECREF(structobj);
    if (strides != base->strides && strides != simple_strides)
        PyMem_XFree(strides);

    return lst;
}


/****************************************************************************/
/*                               Initialize ndbuf                           */
/****************************************************************************/

/*
   State of a new ndbuf during initialization. 'OK' means that initialization
   is complete. 'PTR' means that a pointer has been initialized, but the
   state of the memory is still undefined and ndbuf->offset is disregarded.

   +-----------------+-----------+-------------+----------------+
   |                 | ndbuf_new | init_simple | init_structure |
   +-----------------+-----------+-------------+----------------+
   | next            | OK (NULL) |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | prev            | OK (NULL) |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | len             |    OK     |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | offset          |    OK     |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | data            |    PTR    |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | flags           |   user    |    user     |       OK       |
   +-----------------+-----------+-------------+----------------+
   | exports         |  OK (0)   |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.obj        | OK (NULL) |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.buf        |    PTR    |     PTR     |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.len        | len(data) |  len(data)  |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.itemsize   |     1     |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.readonly   |     0     |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.format     |   NULL    |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.ndim       |     1     |      1      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.shape      |   NULL    |    NULL     |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.strides    |   NULL    |    NULL     |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.suboffsets |   NULL    |    NULL     |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.internal   |    OK     |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
*/

static Py_ssize_t
get_itemsize(PyObject *format)
{
    PyObject *tmp;
    Py_ssize_t itemsize;

    tmp = PyObject_CallFunctionObjArgs(calcsize, format, NULL);
    if (tmp == NULL)
        return -1;
    itemsize = PyLong_AsSsize_t(tmp);
    Py_DECREF(tmp);

    return itemsize;
}

static char *
get_format(PyObject *format)
{
    PyObject *tmp;
    char *fmt;

    tmp = PyUnicode_AsASCIIString(format);
    if (tmp == NULL)
        return NULL;
    fmt = PyMem_Malloc(PyBytes_GET_SIZE(tmp)+1);
    if (fmt == NULL) {
        PyErr_NoMemory();
        Py_DECREF(tmp);
        return NULL;
    }
    strcpy(fmt, PyBytes_AS_STRING(tmp));
    Py_DECREF(tmp);

    return fmt;
}

static int
init_simple(ndbuf_t *ndbuf, PyObject *items, PyObject *format,
            Py_ssize_t itemsize)
{
    PyObject *mview;
    Py_buffer *base = &ndbuf->base;
    int ret;

    mview = PyMemoryView_FromBuffer(base);
    if (mview == NULL)
        return -1;

    ret = pack_from_list(mview, items, format, itemsize);
    Py_DECREF(mview);
    if (ret < 0)
        return -1;

    base->readonly = !(ndbuf->flags & ND_WRITABLE);
    base->itemsize = itemsize;
    base->format = get_format(format);
    if (base->format == NULL)
        return -1;

    return 0;
}

static Py_ssize_t *
seq_as_ssize_array(PyObject *seq, Py_ssize_t len, int is_shape)
{
    Py_ssize_t *dest;
    Py_ssize_t x, i;

    dest = PyMem_New(Py_ssize_t, len);
    if (dest == NULL) {
        PyErr_NoMemory();
        return NULL;
    }

    for (i = 0; i < len; i++) {
        PyObject *tmp = PySequence_Fast_GET_ITEM(seq, i);
        if (!PyLong_Check(tmp)) {
            PyErr_Format(PyExc_ValueError,
                "elements of %s must be integers",
                is_shape ? "shape" : "strides");
            PyMem_Free(dest);
            return NULL;
        }
        x = PyLong_AsSsize_t(tmp);
        if (PyErr_Occurred()) {
            PyMem_Free(dest);
            return NULL;
        }
        if (is_shape && x < 0) {
            PyErr_Format(PyExc_ValueError,
                "elements of shape must be integers >= 0");
            PyMem_Free(dest);
            return NULL;
        }

        dest[i] = x;
    }

    return dest;
}

static Py_ssize_t *
strides_from_shape(const ndbuf_t *ndbuf, int flags)
{
    const Py_buffer *base = &ndbuf->base;
    Py_ssize_t *s, i;

    s = PyMem_Malloc(base->ndim * (sizeof *s));
    if (s == NULL) {
        PyErr_NoMemory();
        return NULL;
    }

    if (flags & ND_FORTRAN) {
        s[0] = base->itemsize;
        for (i = 1; i < base->ndim; i++)
            s[i] = s[i-1] * base->shape[i-1];
    }
    else {
        s[base->ndim-1] = base->itemsize;
        for (i = base->ndim-2; i >= 0; i--)
            s[i] = s[i+1] * base->shape[i+1];
    }

    return s;
}

/* Bounds check:

     len    := complete length of allocated memory
     offset := start of the array

   A single array element is indexed by:

     i = indices[0] * strides[0] + indices[1] * strides[1] + ...

   imin is reached when all indices[n] combined with positive strides are 0
   and all indices combined with negative strides are shape[n]-1, which is
   the maximum index for the nth dimension.

   imax is reached when all indices[n] combined with negative strides are 0
   and all indices combined with positive strides are shape[n]-1.
*/
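
/* Worked example (values from the non-contiguous example further below):
   len = 12, itemsize = 1, offset = 8, shape = {2, 2, 3}, strides = {-6, 3, -1}.
   The negative strides contribute imin = 1*(-6) + 2*(-1) = -8, the positive
   stride contributes imax = 1*3 = 3. Both checks hold:
   imin + offset = 0 >= 0 and imax + offset + itemsize = 12 <= len. */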

static int
verify_structure(Py_ssize_t len, Py_ssize_t itemsize, Py_ssize_t offset,
                 const Py_ssize_t *shape, const Py_ssize_t *strides,
                 Py_ssize_t ndim)
{
    Py_ssize_t imin, imax;
    Py_ssize_t n;

    assert(ndim >= 0);

    if (ndim == 0 && (offset < 0 || offset+itemsize > len))
        goto invalid_combination;

    for (n = 0; n < ndim; n++)
        if (strides[n] % itemsize) {
            PyErr_SetString(PyExc_ValueError,
                "strides must be a multiple of itemsize");
            return -1;
        }

    for (n = 0; n < ndim; n++)
        if (shape[n] == 0)
            return 0;

    imin = imax = 0;
    for (n = 0; n < ndim; n++)
        if (strides[n] <= 0)
            imin += (shape[n]-1) * strides[n];
        else
            imax += (shape[n]-1) * strides[n];

    if (imin + offset < 0 || imax + offset + itemsize > len)
        goto invalid_combination;

    return 0;

invalid_combination:
    PyErr_SetString(PyExc_ValueError,
        "invalid combination of buffer, shape and strides");
    return -1;
}

/*
   Convert a NumPy-style array to an array using suboffsets to stride in
   the first dimension. Requirements: ndim > 0.

   Contiguous example
   ==================

   Input:
   ------
     shape      = {2, 2, 3};
     strides    = {6, 3, 1};
     suboffsets = NULL;
     data       = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
     buf        = &data[0]

   Output:
   -------
     shape      = {2, 2, 3};
     strides    = {sizeof(char *), 3, 1};
     suboffsets = {0, -1, -1};
     data       = {p1, p2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
                   |   |   ^                 ^
                   `---'---'                 |
                       |                     |
                       `---------------------'
     buf        = &data[0]

   So, in the example the input resembles the three-dimensional array
   char v[2][2][3], while the output resembles an array of two pointers
   to two-dimensional arrays: char (*v[2])[2][3].


   Non-contiguous example:
   =======================

   Input (with offset and negative strides):
   -----------------------------------------
     shape      = {2, 2, 3};
     strides    = {-6, 3, -1};
     offset     = 8
     suboffsets = NULL;
     data       = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};

   Output:
   -------
     shape      = {2, 2, 3};
     strides    = {-sizeof(char *), 3, -1};
     suboffsets = {2, -1, -1};
     newdata    = {p1, p2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
                   |   |   ^     ^           ^     ^
                   `---'---'     |           |     `- p2+suboffsets[0]
                       |         `-----------|--- p1+suboffsets[0]
                       `---------------------'
     buf        = &newdata[1]  # striding backwards over the pointers.

   suboffsets[0] is the same as the offset that one would specify if
   the two {2, 3} subarrays were created directly, hence the name.
*/
static int
init_suboffsets(ndbuf_t *ndbuf)
{
    Py_buffer *base = &ndbuf->base;
    Py_ssize_t start, step;
    Py_ssize_t imin, suboffset0;
    Py_ssize_t addsize;
    Py_ssize_t n;
    char *data;

    assert(base->ndim > 0);
    assert(base->suboffsets == NULL);

    /* Allocate new data with additional space for shape[0] pointers. */
    addsize = base->shape[0] * (sizeof (char *));

    /* Align array start to a multiple of 8. */
    addsize = 8 * ((addsize + 7) / 8);

    data = PyMem_Malloc(ndbuf->len + addsize);
    if (data == NULL) {
        PyErr_NoMemory();
        return -1;
    }

    memcpy(data + addsize, ndbuf->data, ndbuf->len);

    PyMem_Free(ndbuf->data);
    ndbuf->data = data;
    ndbuf->len += addsize;
    base->buf = ndbuf->data;

    /* imin: minimum index of the input array relative to ndbuf->offset.
       suboffset0: offset for each sub-array of the output. This is the
       same as calculating -imin' for a sub-array of ndim-1. */
    imin = suboffset0 = 0;
    for (n = 0; n < base->ndim; n++) {
        if (base->shape[n] == 0)
            break;
        if (base->strides[n] <= 0) {
            Py_ssize_t x = (base->shape[n]-1) * base->strides[n];
            imin += x;
            suboffset0 += (n >= 1) ? -x : 0;
        }
    }

    /* Initialize the array of pointers to the sub-arrays. */
    start = addsize + ndbuf->offset + imin;
    step = base->strides[0] < 0 ? -base->strides[0] : base->strides[0];

    for (n = 0; n < base->shape[0]; n++)
        ((char **)base->buf)[n] = (char *)base->buf + start + n*step;

    /* Initialize suboffsets. */
    base->suboffsets = PyMem_Malloc(base->ndim * (sizeof *base->suboffsets));
    if (base->suboffsets == NULL) {
        PyErr_NoMemory();
        return -1;
    }
    base->suboffsets[0] = suboffset0;
    for (n = 1; n < base->ndim; n++)
        base->suboffsets[n] = -1;

    /* Adjust strides for the first (zeroth) dimension. */
    if (base->strides[0] >= 0) {
        base->strides[0] = sizeof(char *);
    }
    else {
        /* Striding backwards. */
        base->strides[0] = -(Py_ssize_t)sizeof(char *);
        if (base->shape[0] > 0)
            base->buf = (char *)base->buf + (base->shape[0]-1) * sizeof(char *);
    }

    ndbuf->flags &= ~(ND_C|ND_FORTRAN);
    ndbuf->offset = 0;
    return 0;
}

static void
init_len(Py_buffer *base)
{
    Py_ssize_t i;

    base->len = 1;
    for (i = 0; i < base->ndim; i++)
        base->len *= base->shape[i];

    base->len *= base->itemsize;
}

static int
init_structure(ndbuf_t *ndbuf, PyObject *shape, PyObject *strides,
               Py_ssize_t ndim)
{
    Py_buffer *base = &ndbuf->base;

    base->ndim = (int)ndim;
    if (ndim == 0) {
        if (ndbuf->flags & ND_PIL) {
            PyErr_SetString(PyExc_TypeError,
                "ndim = 0 cannot be used in conjunction with ND_PIL");
            return -1;
        }
        ndbuf->flags |= (ND_SCALAR|ND_C|ND_FORTRAN);
        return 0;
    }

    /* shape */
    base->shape = seq_as_ssize_array(shape, ndim, 1);
    if (base->shape == NULL)
        return -1;

    /* strides */
    if (strides) {
        base->strides = seq_as_ssize_array(strides, ndim, 0);
    }
    else {
        base->strides = strides_from_shape(ndbuf, ndbuf->flags);
    }
    if (base->strides == NULL)
        return -1;
    if (verify_structure(base->len, base->itemsize, ndbuf->offset,
                         base->shape, base->strides, ndim) < 0)
        return -1;

    /* buf */
    base->buf = ndbuf->data + ndbuf->offset;

    /* len */
    init_len(base);

    /* ndbuf->flags */
    if (PyBuffer_IsContiguous(base, 'C'))
        ndbuf->flags |= ND_C;
    if (PyBuffer_IsContiguous(base, 'F'))
        ndbuf->flags |= ND_FORTRAN;

    /* convert numpy array to suboffset representation */
    if (ndbuf->flags & ND_PIL) {
        /* modifies base->buf, base->strides and base->suboffsets */
        return init_suboffsets(ndbuf);
    }

    return 0;
}

static ndbuf_t *
init_ndbuf(PyObject *items, PyObject *shape, PyObject *strides,
           Py_ssize_t offset, PyObject *format, int flags)
{
    ndbuf_t *ndbuf;
    Py_ssize_t ndim;
    Py_ssize_t nitems;
    Py_ssize_t itemsize;

    /* ndim = len(shape) */
    CHECK_LIST_OR_TUPLE(shape)
    ndim = PySequence_Fast_GET_SIZE(shape);
    if (ndim > ND_MAX_NDIM) {
        PyErr_Format(PyExc_ValueError,
            "ndim must not exceed %d", ND_MAX_NDIM);
        return NULL;
    }

    /* len(strides) = len(shape) */
    if (strides) {
        CHECK_LIST_OR_TUPLE(strides)
        if (PySequence_Fast_GET_SIZE(strides) == 0)
            strides = NULL;
        else if (flags & ND_FORTRAN) {
            PyErr_SetString(PyExc_TypeError,
                "ND_FORTRAN cannot be used together with strides");
            return NULL;
        }
        else if (PySequence_Fast_GET_SIZE(strides) != ndim) {
            PyErr_SetString(PyExc_ValueError,
                "len(shape) != len(strides)");
            return NULL;
        }
    }

    /* itemsize */
    itemsize = get_itemsize(format);
    if (itemsize <= 0) {
        if (itemsize == 0) {
            PyErr_SetString(PyExc_ValueError,
                "itemsize must not be zero");
        }
        return NULL;
    }

    /* convert scalar to list */
    if (ndim == 0) {
        items = Py_BuildValue("(O)", items);
        if (items == NULL)
            return NULL;
    }
    else {
        CHECK_LIST_OR_TUPLE(items)
        Py_INCREF(items);
    }

    /* number of items */
    nitems = PySequence_Fast_GET_SIZE(items);
    if (nitems == 0) {
        PyErr_SetString(PyExc_ValueError,
            "initializer list or tuple must not be empty");
        Py_DECREF(items);
        return NULL;
    }

    ndbuf = ndbuf_new(nitems, itemsize, offset, flags);
    if (ndbuf == NULL) {
        Py_DECREF(items);
        return NULL;
    }

    if (init_simple(ndbuf, items, format, itemsize) < 0)
        goto error;
    if (init_structure(ndbuf, shape, strides, ndim) < 0)
        goto error;

    Py_DECREF(items);
    return ndbuf;

error:
    Py_DECREF(items);
    ndbuf_free(ndbuf);
    return NULL;
}

/* initialize and push a new base onto the linked list */
static int
ndarray_push_base(NDArrayObject *nd, PyObject *items,
                  PyObject *shape, PyObject *strides,
                  Py_ssize_t offset, PyObject *format, int flags)
{
    ndbuf_t *ndbuf;

    ndbuf = init_ndbuf(items, shape, strides, offset, format, flags);
    if (ndbuf == NULL)
        return -1;

    ndbuf_push(nd, ndbuf);
    return 0;
}

#define PyBUF_UNUSED 0x10000
static int
ndarray_init(PyObject *self, PyObject *args, PyObject *kwds)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    static char *kwlist[] = {
        "obj", "shape", "strides", "offset", "format", "flags", "getbuf", NULL
    };
    PyObject *v = NULL;               /* initializer: scalar, list, tuple or base object */
    PyObject *shape = NULL;           /* size of each dimension */
    PyObject *strides = NULL;         /* number of bytes to the next elt in each dim */
    Py_ssize_t offset = 0;            /* buffer offset */
    PyObject *format = simple_format; /* struct module specifier: "B" */
    int flags = ND_DEFAULT;           /* base buffer and ndarray flags */
    int getbuf = PyBUF_UNUSED;        /* re-exporter: getbuffer request flags */

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|OOnOii", kwlist,
            &v, &shape, &strides, &offset, &format, &flags, &getbuf))
        return -1;

    /* NDArrayObject is re-exporter */
    if (PyObject_CheckBuffer(v) && shape == NULL) {
        if (strides || offset || format != simple_format ||
            !(flags == ND_DEFAULT || flags == ND_REDIRECT)) {
            PyErr_SetString(PyExc_TypeError,
                "construction from exporter object only takes 'obj', 'getbuf' "
                "and 'flags' arguments");
            return -1;
        }

        getbuf = (getbuf == PyBUF_UNUSED) ? PyBUF_FULL_RO : getbuf;

        if (ndarray_init_staticbuf(v, nd, getbuf) < 0)
            return -1;

        init_flags(nd->head);
        nd->head->flags |= flags;

        return 0;
    }

    /* NDArrayObject is the original base object. */
    if (getbuf != PyBUF_UNUSED) {
        PyErr_SetString(PyExc_TypeError,
            "getbuf argument only valid for construction from exporter "
            "object");
        return -1;
    }
    if (shape == NULL) {
        PyErr_SetString(PyExc_TypeError,
            "shape is a required argument when constructing from "
            "list, tuple or scalar");
        return -1;
    }

    if (flags & ND_VAREXPORT) {
        nd->flags |= ND_VAREXPORT;
        flags &= ~ND_VAREXPORT;
    }

    /* Initialize and push the first base buffer onto the linked list. */
    return ndarray_push_base(nd, v, shape, strides, offset, format, flags);
}

/* Push an additional base onto the linked list. */
static PyObject *
ndarray_push(PyObject *self, PyObject *args, PyObject *kwds)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    static char *kwlist[] = {
        "items", "shape", "strides", "offset", "format", "flags", NULL
    };
    PyObject *items = NULL;           /* initializer: scalar, list or tuple */
    PyObject *shape = NULL;           /* size of each dimension */
    PyObject *strides = NULL;         /* number of bytes to the next elt in each dim */
    PyObject *format = simple_format; /* struct module specifier: "B" */
    Py_ssize_t offset = 0;            /* buffer offset */
    int flags = ND_DEFAULT;           /* base buffer flags */

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO|OnOi", kwlist,
            &items, &shape, &strides, &offset, &format, &flags))
        return NULL;

    if (flags & ND_VAREXPORT) {
        PyErr_SetString(PyExc_ValueError,
            "ND_VAREXPORT flag can only be used during object creation");
        return NULL;
    }
    if (ND_IS_CONSUMER(nd)) {
        PyErr_SetString(PyExc_BufferError,
            "structure of re-exporting object is immutable");
        return NULL;
    }
    if (!(nd->flags&ND_VAREXPORT) && nd->head->exports > 0) {
        PyErr_Format(PyExc_BufferError,
            "cannot change structure: %zd exported buffer%s",
            nd->head->exports, nd->head->exports==1 ? "" : "s");
        return NULL;
    }

    if (ndarray_push_base(nd, items, shape, strides,
                          offset, format, flags) < 0)
        return NULL;

    Py_RETURN_NONE;
}

/* Pop a base from the linked list (if possible). */
static PyObject *
ndarray_pop(PyObject *self, PyObject *dummy)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    if (ND_IS_CONSUMER(nd)) {
        PyErr_SetString(PyExc_BufferError,
            "structure of re-exporting object is immutable");
        return NULL;
    }
    if (nd->head->exports > 0) {
        PyErr_Format(PyExc_BufferError,
            "cannot change structure: %zd exported buffer%s",
            nd->head->exports, nd->head->exports==1 ? "" : "s");
        return NULL;
    }
    if (nd->head->next == NULL) {
        PyErr_SetString(PyExc_BufferError,
            "list only has a single base");
        return NULL;
    }

    ndbuf_pop(nd);
    Py_RETURN_NONE;
}


/**************************************************************************/
/*                               getbuffer                                */
/**************************************************************************/
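
/* Overview: ndarray_getbuf() starts from a copy of the active base buffer
   and then strips information according to the request flags: the format is
   dropped unless PyBUF_FORMAT is set, strides are dropped for non-STRIDES
   requests (only allowed if the buffer is C-contiguous), and shape is dropped
   for PyBUF_SIMPLE/PyBUF_WRITABLE requests. Contiguity and writability
   requests that the base buffer cannot satisfy raise BufferError. */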

static int
ndarray_getbuf(NDArrayObject *self, Py_buffer *view, int flags)
{
    ndbuf_t *ndbuf = self->head;
    Py_buffer *base = &ndbuf->base;
    int baseflags = ndbuf->flags;

    /* redirect mode */
    if (base->obj != NULL && (baseflags&ND_REDIRECT)) {
        return PyObject_GetBuffer(base->obj, view, flags);
    }

    /* start with complete information */
    *view = *base;
    view->obj = NULL;

    /* reconstruct format */
    if (view->format == NULL)
        view->format = "B";

    if (base->ndim != 0 &&
        ((REQ_SHAPE(flags) && base->shape == NULL) ||
         (REQ_STRIDES(flags) && base->strides == NULL))) {
        /* The ndarray is a re-exporter that has been created without full
           information for testing purposes. In this particular case the
           ndarray is not a PEP-3118 compliant buffer provider. */
        PyErr_SetString(PyExc_BufferError,
            "re-exporter does not provide format, shape or strides");
        return -1;
    }

    if (baseflags & ND_GETBUF_FAIL) {
        PyErr_SetString(PyExc_BufferError,
            "ND_GETBUF_FAIL: forced test exception");
        if (baseflags & ND_GETBUF_UNDEFINED)
            view->obj = (PyObject *)0x1; /* wrong but permitted in <= 3.2 */
        return -1;
    }

    if (REQ_WRITABLE(flags) && base->readonly) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not writable");
        return -1;
    }
    if (!REQ_FORMAT(flags)) {
        /* NULL indicates that the buffer's data type has been cast to 'B'.
           view->itemsize is the _previous_ itemsize. If shape is present,
           the equality product(shape) * itemsize = len still holds at this
           point. The equality calcsize(format) = itemsize does _not_ hold
           from here on! */
        view->format = NULL;
    }

    if (REQ_C_CONTIGUOUS(flags) && !ND_C_CONTIGUOUS(baseflags)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not C-contiguous");
        return -1;
    }
    if (REQ_F_CONTIGUOUS(flags) && !ND_FORTRAN_CONTIGUOUS(baseflags)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not Fortran contiguous");
        return -1;
    }
    if (REQ_ANY_CONTIGUOUS(flags) && !ND_ANY_CONTIGUOUS(baseflags)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not contiguous");
        return -1;
    }
    if (!REQ_INDIRECT(flags) && (baseflags & ND_PIL)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray cannot be represented without suboffsets");
        return -1;
    }
    if (!REQ_STRIDES(flags)) {
        if (!ND_C_CONTIGUOUS(baseflags)) {
            PyErr_SetString(PyExc_BufferError,
                "ndarray is not C-contiguous");
            return -1;
        }
        view->strides = NULL;
    }
    if (!REQ_SHAPE(flags)) {
        /* PyBUF_SIMPLE or PyBUF_WRITABLE: at this point buf is C-contiguous,
           so base->buf = ndbuf->data. */
        if (view->format != NULL) {
            /* PyBUF_SIMPLE|PyBUF_FORMAT and PyBUF_WRITABLE|PyBUF_FORMAT do
               not make sense. */
            PyErr_Format(PyExc_BufferError,
                "ndarray: cannot cast to unsigned bytes if the format flag "
                "is present");
            return -1;
        }
        /* product(shape) * itemsize = len and calcsize(format) = itemsize
           do _not_ hold from here on! */
        view->ndim = 1;
        view->shape = NULL;
    }

    /* Ascertain that the new buffer has the same contiguity as the exporter */
    if (ND_C_CONTIGUOUS(baseflags) != PyBuffer_IsContiguous(view, 'C') ||
        /* skip cast to 1-d */
        (view->format != NULL && view->shape != NULL &&
         ND_FORTRAN_CONTIGUOUS(baseflags) != PyBuffer_IsContiguous(view, 'F')) ||
        /* cast to 1-d */
        (view->format == NULL && view->shape == NULL &&
         !PyBuffer_IsContiguous(view, 'F'))) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray: contiguity mismatch in getbuf()");
        return -1;
    }

    view->obj = (PyObject *)self;
    Py_INCREF(view->obj);
    self->head->exports++;

    return 0;
}

static int
ndarray_releasebuf(NDArrayObject *self, Py_buffer *view)
{
    if (!ND_IS_CONSUMER(self)) {
        ndbuf_t *ndbuf = view->internal;
        if (--ndbuf->exports == 0 && ndbuf != self->head)
            ndbuf_delete(self, ndbuf);
    }

    return 0;
}

static PyBufferProcs ndarray_as_buffer = {
    (getbufferproc)ndarray_getbuf,        /* bf_getbuffer */
    (releasebufferproc)ndarray_releasebuf /* bf_releasebuffer */
};


/**************************************************************************/
/*                           indexing/slicing                             */
/**************************************************************************/

static char *
ptr_from_index(Py_buffer *base, Py_ssize_t index)
{
    char *ptr;
    Py_ssize_t nitems; /* items in the first dimension */

    if (base->shape)
        nitems = base->shape[0];
    else {
        assert(base->ndim == 1 && SIMPLE_FORMAT(base->format));
        nitems = base->len;
    }

    if (index < 0) {
        index += nitems;
    }
    if (index < 0 || index >= nitems) {
        PyErr_SetString(PyExc_IndexError, "index out of bounds");
        return NULL;
    }

    ptr = (char *)base->buf;

    if (base->strides == NULL)
        ptr += base->itemsize * index;
    else
        ptr += base->strides[0] * index;

    ptr = ADJUST_PTR(ptr, base->suboffsets);

    return ptr;
}

static PyObject *
ndarray_item(NDArrayObject *self, Py_ssize_t index)
{
    ndbuf_t *ndbuf = self->head;
    Py_buffer *base = &ndbuf->base;
    char *ptr;

    if (base->ndim == 0) {
        PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
        return NULL;
    }

    ptr = ptr_from_index(base, index);
    if (ptr == NULL)
        return NULL;

    if (base->ndim == 1) {
        return unpack_single(ptr, base->format, base->itemsize);
    }
    else {
        NDArrayObject *nd;
        Py_buffer *subview;

        nd = (NDArrayObject *)ndarray_new(&NDArray_Type, NULL, NULL);
        if (nd == NULL)
            return NULL;

        if (ndarray_init_staticbuf((PyObject *)self, nd, PyBUF_FULL_RO) < 0) {
            Py_DECREF(nd);
            return NULL;
        }

        subview = &nd->staticbuf.base;

        subview->buf = ptr;
        subview->len /= subview->shape[0];

        subview->ndim--;
        subview->shape++;
        if (subview->strides) subview->strides++;
        if (subview->suboffsets) subview->suboffsets++;

        init_flags(&nd->staticbuf);

        return (PyObject *)nd;
    }
}

/*
  For each dimension, we get valid (start, stop, step, slicelength) quadruples
  from PySlice_GetIndicesEx().

  Slicing NumPy arrays
  ====================

    A pointer to an element in a NumPy array is defined by:

      ptr = (char *)buf + indices[0] * strides[0] +
                          ... +
                          indices[ndim-1] * strides[ndim-1]

    Adjust buf:
    -----------
      Adding start[n] for each dimension effectively adds the constant:

        c = start[0] * strides[0] + ... + start[ndim-1] * strides[ndim-1]

      Therefore init_slice() adds all start[n] directly to buf.

    Adjust shape:
    -------------
      Obviously shape[n] = slicelength[n]

    Adjust strides:
    ---------------
      In the original array, the next element in a dimension is reached
      by adding strides[n] to the pointer. In the sliced array, elements
      may be skipped, so the next element is reached by adding:

        strides[n] * step[n]

  Slicing PIL arrays
  ==================

    Layout:
    -------
      In the first (zeroth) dimension, PIL arrays have an array of pointers
      to sub-arrays of ndim-1. Striding in the first dimension is done by
      getting the index of the nth pointer, dereferencing it and then adding
      a suboffset to it. The arrays pointed to can best be seen as regular
      NumPy arrays.

    Adjust buf:
    -----------
      In the original array, buf points to a location (usually the start)
      in the array of pointers. For the sliced array, start[0] can be
      added to buf in the same manner as for NumPy arrays.

    Adjust suboffsets:
    ------------------
      Due to the dereferencing step in the addressing scheme, it is not
      possible to adjust buf for higher dimensions. Recall that the
      sub-arrays pointed to are regular NumPy arrays, so for each of
      those arrays adding start[n] effectively adds the constant:

        c = start[1] * strides[1] + ... + start[ndim-1] * strides[ndim-1]

      This constant is added to suboffsets[0]. suboffsets[0] in turn is
      added to each pointer right after dereferencing.

    Adjust shape and strides:
    -------------------------
      Shape and strides are not influenced by the dereferencing step, so
      they are adjusted in the same manner as for NumPy arrays.

  Multiple levels of suboffsets
  =============================

    For a construct like an array of pointers to array of pointers to
    sub-arrays of ndim-2:

      suboffsets[0] = start[1] * strides[1]
      suboffsets[1] = start[2] * strides[2] + ...
*/
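
/* Worked example (illustrative values): slicing a one-dimensional NumPy-style
   array of shape {6}, strides {1} with the key [1::2] gives start = 1,
   step = 2, slicelength = 3, so init_slice() below advances buf by
   1 * strides[0] = 1 and sets shape[0] = 3 and strides[0] = 1 * 2 = 2. */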
  1427. static int
  1428. init_slice(Py_buffer *base, PyObject *key, int dim)
  1429. {
  1430. Py_ssize_t start, stop, step, slicelength;
  1431. if (PySlice_GetIndicesEx(key, base->shape[dim],
  1432. &start, &stop, &step, &slicelength) < 0) {
  1433. return -1;
  1434. }
  1435. if (base->suboffsets == NULL || dim == 0) {
  1436. adjust_buf:
  1437. base->buf = (char *)base->buf + base->strides[dim] * start;
  1438. }
  1439. else {
  1440. Py_ssize_t n = dim-1;
  1441. while (n >= 0 && base->suboffsets[n] < 0)
  1442. n--;
  1443. if (n < 0)
  1444. goto adjust_buf; /* all suboffsets are negative */
  1445. base->suboffsets[n] = base->suboffsets[n] + base->strides[dim] * start;
  1446. }
  1447. base->shape[dim] = slicelength;
  1448. base->strides[dim] = base->strides[dim] * step;
  1449. return 0;
  1450. }
  1451. static int
  1452. copy_structure(Py_buffer *base)
  1453. {
  1454. Py_ssize_t *shape = NULL, *strides = NULL, *suboffsets = NULL;
  1455. Py_ssize_t i;
  1456. shape = PyMem_Malloc(base->ndim * (sizeof *shape));
  1457. strides = PyMem_Malloc(base->ndim * (sizeof *strides));
  1458. if (shape == NULL || strides == NULL)
  1459. goto err_nomem;
  1460. suboffsets = NULL;
  1461. if (base->suboffsets) {
  1462. suboffsets = PyMem_Malloc(base->ndim * (sizeof *suboffsets));
  1463. if (suboffsets == NULL)
  1464. goto err_nomem;
  1465. }
  1466. for (i = 0; i < base->ndim; i++) {
  1467. shape[i] = base->shape[i];
  1468. strides[i] = base->strides[i];
  1469. if (suboffsets)
  1470. suboffsets[i] = base->suboffsets[i];
  1471. }
  1472. base->shape = shape;
  1473. base->strides = strides;
  1474. base->suboffsets = suboffsets;
  1475. return 0;
  1476. err_nomem:
  1477. PyErr_NoMemory();
  1478. PyMem_XFree(shape);
  1479. PyMem_XFree(strides);
  1480. PyMem_XFree(suboffsets);
  1481. return -1;
  1482. }
  1483. static PyObject *
  1484. ndarray_subscript(NDArrayObject *self, PyObject *key)
  1485. {
  1486. NDArrayObject *nd;
  1487. ndbuf_t *ndbuf;
  1488. Py_buffer *base = &self->head->base;
  1489. if (base->ndim == 0) {
  1490. if (PyTuple_Check(key) && PyTuple_GET_SIZE(key) == 0) {
  1491. return unpack_single(base->buf, base->format, base->itemsize);
  1492. }
  1493. else if (key == Py_Ellipsis) {
  1494. Py_INCREF(self);
  1495. return (PyObject *)self;
  1496. }
  1497. else {
  1498. PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
  1499. return NULL;
  1500. }
  1501. }
  1502. if (PyIndex_Check(key)) {
  1503. Py_ssize_t index = PyLong_AsSsize_t(key);
  1504. if (index == -1 && PyErr_Occurred())
  1505. return NULL;
  1506. return ndarray_item(self, index);
  1507. }
  1508. nd = (NDArrayObject *)ndarray_new(&NDArray_Type, NULL, NULL);
  1509. if (nd == NULL)
  1510. return NULL;
  1511. /* new ndarray is a consumer */
  1512. if (ndarray_init_staticbuf((PyObject *)self, nd, PyBUF_FULL_RO) < 0) {
  1513. Py_DECREF(nd);
  1514. return NULL;
  1515. }
  1516. /* copy shape, strides and suboffsets */
  1517. ndbuf = nd->head;
  1518. base = &ndbuf->base;
  1519. if (copy_structure(base) < 0) {
  1520. Py_DECREF(nd);
  1521. return NULL;
  1522. }
  1523. ndbuf->flags |= ND_OWN_ARRAYS;
  1524. if (PySlice_Check(key)) {
  1525. /* one-dimensional slice */
  1526. if (init_slice(base, key, 0) < 0)
  1527. goto err_occurred;
  1528. }
  1529. else if (PyTuple_Check(key)) {
  1530. /* multi-dimensional slice */
  1531. PyObject *tuple = key;
  1532. Py_ssize_t i, n;
  1533. n = PyTuple_GET_SIZE(tuple);
  1534. for (i = 0; i < n; i++) {
  1535. key = PyTuple_GET_ITEM(tuple, i);
  1536. if (!PySlice_Check(key))
  1537. goto type_error;
  1538. if (init_slice(base, key, (int)i) < 0)
  1539. goto err_occurred;
  1540. }
  1541. }
  1542. else {
  1543. goto type_error;
  1544. }
  1545. init_len(base);
  1546. init_flags(ndbuf);
  1547. return (PyObject *)nd;
  1548. type_error:
  1549. PyErr_Format(PyExc_TypeError,
  1550. "cannot index memory using \"%.200s\"",
  1551. key->ob_type->tp_name);
  1552. err_occurred:
  1553. Py_DECREF(nd);
  1554. return NULL;
  1555. }
  1556. static int
  1557. ndarray_ass_subscript(NDArrayObject *self, PyObject *key, PyObject *value)
  1558. {
  1559. NDArrayObject *nd;
  1560. Py_buffer *dest = &self->head->base;
  1561. Py_buffer src;
  1562. char *ptr;
  1563. Py_ssize_t index;
  1564. int ret = -1;
  1565. if (dest->readonly) {
  1566. PyErr_SetString(PyExc_TypeError, "ndarray is not writable");
  1567. return -1;
  1568. }
  1569. if (value == NULL) {
  1570. PyErr_SetString(PyExc_TypeError, "ndarray data cannot be deleted");
  1571. return -1;
  1572. }
  1573. if (dest->ndim == 0) {
  1574. if (key == Py_Ellipsis ||
  1575. (PyTuple_Check(key) && PyTuple_GET_SIZE(key) == 0)) {
  1576. ptr = (char *)dest->buf;
  1577. return pack_single(ptr, value, dest->format, dest->itemsize);
  1578. }
  1579. else {
  1580. PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
  1581. return -1;
  1582. }
  1583. }
  1584. if (dest->ndim == 1 && PyIndex_Check(key)) {
  1585. /* rvalue must be a single item */
  1586. index = PyLong_AsSsize_t(key);
  1587. if (index == -1 && PyErr_Occurred())
  1588. return -1;
  1589. else {
  1590. ptr = ptr_from_index(dest, index);
  1591. if (ptr == NULL)
  1592. return -1;
  1593. }
  1594. return pack_single(ptr, value, dest->format, dest->itemsize);
  1595. }
  1596. /* rvalue must be an exporter */
  1597. if (PyObject_GetBuffer(value, &src, PyBUF_FULL_RO) == -1)
  1598. return -1;
  1599. nd = (NDArrayObject *)ndarray_subscript(self, key);
  1600. if (nd != NULL) {
  1601. dest = &nd->head->base;
  1602. ret = copy_buffer(dest, &src);
  1603. Py_DECREF(nd);
  1604. }
  1605. PyBuffer_Release(&src);
  1606. return ret;
  1607. }
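/* Illustrative sketch (editorial addition): single-item assignment packs one
   value, while slice assignment requires the right-hand side to be a buffer
   exporter with a matching structure, e.g.

       nd = ndarray(list(range(10)), shape=[10], flags=ND_WRITABLE)
       nd[0] = 255                     # pack_single()
       nd[2:5] = bytearray(b"abc")     # copy_buffer() from another exporter

   Deleting items and writing to a read-only ndarray raise TypeError. */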
  1608. static PyObject *
  1609. slice_indices(PyObject *self, PyObject *args)
  1610. {
  1611. PyObject *ret, *key, *tmp;
  1612. Py_ssize_t s[4]; /* start, stop, step, slicelength */
  1613. Py_ssize_t i, len;
  1614. if (!PyArg_ParseTuple(args, "On", &key, &len)) {
  1615. return NULL;
  1616. }
  1617. if (!PySlice_Check(key)) {
  1618. PyErr_SetString(PyExc_TypeError,
  1619. "first argument must be a slice object");
  1620. return NULL;
  1621. }
  1622. if (PySlice_GetIndicesEx(key, len, &s[0], &s[1], &s[2], &s[3]) < 0) {
  1623. return NULL;
  1624. }
  1625. ret = PyTuple_New(4);
  1626. if (ret == NULL)
  1627. return NULL;
  1628. for (i = 0; i < 4; i++) {
  1629. tmp = PyLong_FromSsize_t(s[i]);
  1630. if (tmp == NULL)
  1631. goto error;
  1632. PyTuple_SET_ITEM(ret, i, tmp);
  1633. }
  1634. return ret;
  1635. error:
  1636. Py_DECREF(ret);
  1637. return NULL;
  1638. }
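/* Illustrative sketch (editorial addition): slice_indices() simply exposes
   PySlice_GetIndicesEx() to Python, e.g.

       slice_indices(slice(0, 10, 2), 5)   # (0, 5, 2, 3)

   i.e. (start, stop, step, slicelength) for a sequence of length 5. */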
  1639. static PyMappingMethods ndarray_as_mapping = {
  1640. NULL, /* mp_length */
  1641. (binaryfunc)ndarray_subscript, /* mp_subscript */
  1642. (objobjargproc)ndarray_ass_subscript /* mp_ass_subscript */
  1643. };
  1644. static PySequenceMethods ndarray_as_sequence = {
  1645. 0, /* sq_length */
  1646. 0, /* sq_concat */
  1647. 0, /* sq_repeat */
  1648. (ssizeargfunc)ndarray_item, /* sq_item */
  1649. };
  1650. /**************************************************************************/
  1651. /* getters */
  1652. /**************************************************************************/
  1653. static PyObject *
  1654. ssize_array_as_tuple(Py_ssize_t *array, Py_ssize_t len)
  1655. {
  1656. PyObject *tuple, *x;
  1657. Py_ssize_t i;
  1658. if (array == NULL)
  1659. return PyTuple_New(0);
  1660. tuple = PyTuple_New(len);
  1661. if (tuple == NULL)
  1662. return NULL;
  1663. for (i = 0; i < len; i++) {
  1664. x = PyLong_FromSsize_t(array[i]);
  1665. if (x == NULL) {
  1666. Py_DECREF(tuple);
  1667. return NULL;
  1668. }
  1669. PyTuple_SET_ITEM(tuple, i, x);
  1670. }
  1671. return tuple;
  1672. }
  1673. static PyObject *
  1674. ndarray_get_flags(NDArrayObject *self, void *closure)
  1675. {
  1676. return PyLong_FromLong(self->head->flags);
  1677. }
  1678. static PyObject *
  1679. ndarray_get_offset(NDArrayObject *self, void *closure)
  1680. {
  1681. ndbuf_t *ndbuf = self->head;
  1682. return PyLong_FromSsize_t(ndbuf->offset);
  1683. }
  1684. static PyObject *
  1685. ndarray_get_obj(NDArrayObject *self, void *closure)
  1686. {
  1687. Py_buffer *base = &self->head->base;
  1688. if (base->obj == NULL) {
  1689. Py_RETURN_NONE;
  1690. }
  1691. Py_INCREF(base->obj);
  1692. return base->obj;
  1693. }
  1694. static PyObject *
  1695. ndarray_get_nbytes(NDArrayObject *self, void *closure)
  1696. {
  1697. Py_buffer *base = &self->head->base;
  1698. return PyLong_FromSsize_t(base->len);
  1699. }
  1700. static PyObject *
  1701. ndarray_get_readonly(NDArrayObject *self, void *closure)
  1702. {
  1703. Py_buffer *base = &self->head->base;
  1704. return PyLong_FromLong(base->readonly);
  1705. }
  1706. static PyObject *
  1707. ndarray_get_itemsize(NDArrayObject *self, void *closure)
  1708. {
  1709. Py_buffer *base = &self->head->base;
  1710. return PyLong_FromSsize_t(base->itemsize);
  1711. }
  1712. static PyObject *
  1713. ndarray_get_format(NDArrayObject *self, void *closure)
  1714. {
  1715. Py_buffer *base = &self->head->base;
  1716. char *fmt = base->format ? base->format : "";
  1717. return PyUnicode_FromString(fmt);
  1718. }
  1719. static PyObject *
  1720. ndarray_get_ndim(NDArrayObject *self, void *closure)
  1721. {
  1722. Py_buffer *base = &self->head->base;
  1723. return PyLong_FromSsize_t(base->ndim);
  1724. }
  1725. static PyObject *
  1726. ndarray_get_shape(NDArrayObject *self, void *closure)
  1727. {
  1728. Py_buffer *base = &self->head->base;
  1729. return ssize_array_as_tuple(base->shape, base->ndim);
  1730. }
  1731. static PyObject *
  1732. ndarray_get_strides(NDArrayObject *self, void *closure)
  1733. {
  1734. Py_buffer *base = &self->head->base;
  1735. return ssize_array_as_tuple(base->strides, base->ndim);
  1736. }
  1737. static PyObject *
  1738. ndarray_get_suboffsets(NDArrayObject *self, void *closure)
  1739. {
  1740. Py_buffer *base = &self->head->base;
  1741. return ssize_array_as_tuple(base->suboffsets, base->ndim);
  1742. }
  1743. static PyObject *
  1744. ndarray_c_contig(PyObject *self, PyObject *dummy)
  1745. {
  1746. NDArrayObject *nd = (NDArrayObject *)self;
  1747. int ret = PyBuffer_IsContiguous(&nd->head->base, 'C');
  1748. if (ret != ND_C_CONTIGUOUS(nd->head->flags)) {
  1749. PyErr_SetString(PyExc_RuntimeError,
  1750. "results from PyBuffer_IsContiguous() and flags differ");
  1751. return NULL;
  1752. }
  1753. return PyBool_FromLong(ret);
  1754. }
  1755. static PyObject *
  1756. ndarray_fortran_contig(PyObject *self, PyObject *dummy)
  1757. {
  1758. NDArrayObject *nd = (NDArrayObject *)self;
  1759. int ret = PyBuffer_IsContiguous(&nd->head->base, 'F');
  1760. if (ret != ND_FORTRAN_CONTIGUOUS(nd->head->flags)) {
  1761. PyErr_SetString(PyExc_RuntimeError,
  1762. "results from PyBuffer_IsContiguous() and flags differ");
  1763. return NULL;
  1764. }
  1765. return PyBool_FromLong(ret);
  1766. }
  1767. static PyObject *
  1768. ndarray_contig(PyObject *self, PyObject *dummy)
  1769. {
  1770. NDArrayObject *nd = (NDArrayObject *)self;
  1771. int ret = PyBuffer_IsContiguous(&nd->head->base, 'A');
  1772. if (ret != ND_ANY_CONTIGUOUS(nd->head->flags)) {
  1773. PyErr_SetString(PyExc_RuntimeError,
  1774. "results from PyBuffer_IsContiguous() and flags differ");
  1775. return NULL;
  1776. }
  1777. return PyBool_FromLong(ret);
  1778. }
  1779. static PyGetSetDef ndarray_getset [] =
  1780. {
  1781. /* ndbuf */
  1782. { "flags", (getter)ndarray_get_flags, NULL, NULL, NULL},
  1783. { "offset", (getter)ndarray_get_offset, NULL, NULL, NULL},
  1784. /* ndbuf.base */
  1785. { "obj", (getter)ndarray_get_obj, NULL, NULL, NULL},
  1786. { "nbytes", (getter)ndarray_get_nbytes, NULL, NULL, NULL},
  1787. { "readonly", (getter)ndarray_get_readonly, NULL, NULL, NULL},
  1788. { "itemsize", (getter)ndarray_get_itemsize, NULL, NULL, NULL},
  1789. { "format", (getter)ndarray_get_format, NULL, NULL, NULL},
  1790. { "ndim", (getter)ndarray_get_ndim, NULL, NULL, NULL},
  1791. { "shape", (getter)ndarray_get_shape, NULL, NULL, NULL},
  1792. { "strides", (getter)ndarray_get_strides, NULL, NULL, NULL},
  1793. { "suboffsets", (getter)ndarray_get_suboffsets, NULL, NULL, NULL},
  1794. { "c_contiguous", (getter)ndarray_c_contig, NULL, NULL, NULL},
  1795. { "f_contiguous", (getter)ndarray_fortran_contig, NULL, NULL, NULL},
  1796. { "contiguous", (getter)ndarray_contig, NULL, NULL, NULL},
  1797. {NULL}
  1798. };
  1799. static PyObject *
  1800. ndarray_tolist(PyObject *self, PyObject *dummy)
  1801. {
  1802. return ndarray_as_list((NDArrayObject *)self);
  1803. }
  1804. static PyObject *
  1805. ndarray_tobytes(PyObject *self, PyObject *dummy)
  1806. {
  1807. ndbuf_t *ndbuf = ((NDArrayObject *)self)->head;
  1808. Py_buffer *src = &ndbuf->base;
  1809. Py_buffer dest;
  1810. PyObject *ret = NULL;
  1811. char *mem;
  1812. if (ND_C_CONTIGUOUS(ndbuf->flags))
  1813. return PyBytes_FromStringAndSize(src->buf, src->len);
  1814. assert(src->shape != NULL);
  1815. assert(src->strides != NULL);
  1816. assert(src->ndim > 0);
  1817. mem = PyMem_Malloc(src->len);
  1818. if (mem == NULL) {
  1819. PyErr_NoMemory();
  1820. return NULL;
  1821. }
  1822. dest = *src;
  1823. dest.buf = mem;
  1824. dest.suboffsets = NULL;
  1825. dest.strides = strides_from_shape(ndbuf, 0);
  1826. if (dest.strides == NULL)
  1827. goto out;
  1828. if (copy_buffer(&dest, src) < 0)
  1829. goto out;
  1830. ret = PyBytes_FromStringAndSize(mem, src->len);
  1831. out:
  1832. PyMem_XFree(dest.strides);
  1833. PyMem_Free(mem);
  1834. return ret;
  1835. }
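/* Illustrative sketch (editorial addition): tobytes() returns the data in
   logical (C-contiguous) order, repacking through copy_buffer() when the
   export itself is not C-contiguous, e.g.

       nd = ndarray(list(range(6)), shape=[6])
       nd[::2].tobytes()   # b'\x00\x02\x04', copied into fresh contiguous memory */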
  1836. /* add redundant (negative) suboffsets for testing */
  1837. static PyObject *
  1838. ndarray_add_suboffsets(PyObject *self, PyObject *dummy)
  1839. {
  1840. NDArrayObject *nd = (NDArrayObject *)self;
  1841. Py_buffer *base = &nd->head->base;
  1842. Py_ssize_t i;
  1843. if (base->suboffsets != NULL) {
  1844. PyErr_SetString(PyExc_TypeError,
  1845. "cannot add suboffsets to PIL-style array");
  1846. return NULL;
  1847. }
  1848. if (base->strides == NULL) {
  1849. PyErr_SetString(PyExc_TypeError,
  1850. "cannot add suboffsets to array without strides");
  1851. return NULL;
  1852. }
  1853. base->suboffsets = PyMem_Malloc(base->ndim * (sizeof *base->suboffsets));
  1854. if (base->suboffsets == NULL) {
  1855. PyErr_NoMemory();
  1856. return NULL;
  1857. }
  1858. for (i = 0; i < base->ndim; i++)
  1859. base->suboffsets[i] = -1;
  1860. nd->head->flags &= ~(ND_C|ND_FORTRAN);
  1861. Py_RETURN_NONE;
  1862. }
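/* Illustrative sketch (editorial addition): since every added suboffset is
   negative, no pointer is actually dereferenced and the logical contents stay
   the same; the call merely forces the suboffset code paths to be exercised:

       nd = ndarray(list(range(4)), shape=[4])
       nd.suboffsets        # ()
       nd.add_suboffsets()
       nd.suboffsets        # (-1,)
       nd.c_contiguous      # False, the ND_C flag was cleared above */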
  1863. /* Test PyMemoryView_FromBuffer(): return a memoryview from a static buffer.
  1864. Obviously this is fragile and only one such view may be active at any
  1865. time. Never use anything like this in real code! */
  1866. static char *infobuf = NULL;
  1867. static PyObject *
  1868. ndarray_memoryview_from_buffer(PyObject *self, PyObject *dummy)
  1869. {
  1870. const NDArrayObject *nd = (NDArrayObject *)self;
  1871. const Py_buffer *view = &nd->head->base;
  1872. const ndbuf_t *ndbuf;
  1873. static char format[ND_MAX_NDIM+1];
  1874. static Py_ssize_t shape[ND_MAX_NDIM];
  1875. static Py_ssize_t strides[ND_MAX_NDIM];
  1876. static Py_ssize_t suboffsets[ND_MAX_NDIM];
  1877. static Py_buffer info;
  1878. char *p;
  1879. if (!ND_IS_CONSUMER(nd))
  1880. ndbuf = nd->head; /* self is ndarray/original exporter */
  1881. else if (NDArray_Check(view->obj) && !ND_IS_CONSUMER(view->obj))
  1882. /* self is ndarray and consumer from ndarray/original exporter */
  1883. ndbuf = ((NDArrayObject *)view->obj)->head;
  1884. else {
  1885. PyErr_SetString(PyExc_TypeError,
  1886. "memoryview_from_buffer(): ndarray must be original exporter or "
  1887. "consumer from ndarray/original exporter");
  1888. return NULL;
  1889. }
  1890. info = *view;
  1891. p = PyMem_Realloc(infobuf, ndbuf->len);
  1892. if (p == NULL) {
  1893. PyMem_Free(infobuf);
  1894. PyErr_NoMemory();
  1895. infobuf = NULL;
  1896. return NULL;
  1897. }
  1898. else {
  1899. infobuf = p;
  1900. }
  1901. /* copy the complete raw data */
  1902. memcpy(infobuf, ndbuf->data, ndbuf->len);
  1903. info.buf = infobuf + ((char *)view->buf - ndbuf->data);
  1904. if (view->format) {
  1905. if (strlen(view->format) > ND_MAX_NDIM) {
  1906. PyErr_Format(PyExc_TypeError,
  1907. "memoryview_from_buffer: format is limited to %d characters",
  1908. ND_MAX_NDIM);
  1909. return NULL;
  1910. }
  1911. strcpy(format, view->format);
  1912. info.format = format;
  1913. }
  1914. if (view->ndim > ND_MAX_NDIM) {
  1915. PyErr_Format(PyExc_TypeError,
  1916. "memoryview_from_buffer: ndim is limited to %d", ND_MAX_NDIM);
  1917. return NULL;
  1918. }
  1919. if (view->shape) {
  1920. memcpy(shape, view->shape, view->ndim * sizeof(Py_ssize_t));
  1921. info.shape = shape;
  1922. }
  1923. if (view->strides) {
  1924. memcpy(strides, view->strides, view->ndim * sizeof(Py_ssize_t));
  1925. info.strides = strides;
  1926. }
  1927. if (view->suboffsets) {
  1928. memcpy(suboffsets, view->suboffsets, view->ndim * sizeof(Py_ssize_t));
  1929. info.suboffsets = suboffsets;
  1930. }
  1931. return PyMemoryView_FromBuffer(&info);
  1932. }
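/* Illustrative sketch (editorial addition): the returned memoryview wraps the
   static arrays above plus a heap snapshot of the raw data, so at most one
   such view is meaningful at a time, e.g.

       nd = ndarray(list(range(4)), shape=[2, 2])
       m = nd.memoryview_from_buffer()   # snapshot; later changes to nd are not seen
       m.tolist()                        # [[0, 1], [2, 3]]

   Creating another view reuses (and overwrites) the same static storage. */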
  1933. /* Get a single item from bufobj at the location specified by seq.
  1934. seq is a list or tuple of indices. The purpose of this function
  1935. is to check other functions against PyBuffer_GetPointer(). */
  1936. static PyObject *
  1937. get_pointer(PyObject *self, PyObject *args)
  1938. {
  1939. PyObject *ret = NULL, *bufobj, *seq;
  1940. Py_buffer view;
  1941. Py_ssize_t indices[ND_MAX_NDIM];
  1942. Py_ssize_t i;
  1943. void *ptr;
  1944. if (!PyArg_ParseTuple(args, "OO", &bufobj, &seq)) {
  1945. return NULL;
  1946. }
  1947. CHECK_LIST_OR_TUPLE(seq);
  1948. if (PyObject_GetBuffer(bufobj, &view, PyBUF_FULL_RO) < 0)
  1949. return NULL;
  1950. if (view.ndim > ND_MAX_NDIM) {
  1951. PyErr_Format(PyExc_ValueError,
  1952. "get_pointer(): ndim > %d", ND_MAX_NDIM);
  1953. goto out;
  1954. }
  1955. if (PySequence_Fast_GET_SIZE(seq) != view.ndim) {
  1956. PyErr_SetString(PyExc_ValueError,
  1957. "get_pointer(): len(indices) != ndim");
  1958. goto out;
  1959. }
  1960. for (i = 0; i < view.ndim; i++) {
  1961. PyObject *x = PySequence_Fast_GET_ITEM(seq, i);
  1962. indices[i] = PyLong_AsSsize_t(x);
  1963. if (PyErr_Occurred())
  1964. goto out;
  1965. if (indices[i] < 0 || indices[i] >= view.shape[i]) {
  1966. PyErr_Format(PyExc_ValueError,
  1967. "get_pointer(): invalid index %zd at position %zd",
  1968. indices[i], i);
  1969. goto out;
  1970. }
  1971. }
  1972. ptr = PyBuffer_GetPointer(&view, indices);
  1973. ret = unpack_single(ptr, view.format, view.itemsize);
  1974. out:
  1975. PyBuffer_Release(&view);
  1976. return ret;
  1977. }
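/* Illustrative sketch (editorial addition): get_pointer() mirrors plain
   indexing and is used to cross-check PyBuffer_GetPointer(), e.g.

       nd = ndarray(list(range(12)), shape=[3, 4])
       get_pointer(nd, [1, 2])   # 6, the item at row 1, column 2
       get_pointer(nd, [3, 0])   # ValueError (index out of range) */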
  1978. static PyObject *
  1979. get_sizeof_void_p(PyObject *self)
  1980. {
  1981. return PyLong_FromSize_t(sizeof(void *));
  1982. }
  1983. static char
  1984. get_ascii_order(PyObject *order)
  1985. {
  1986. PyObject *ascii_order;
  1987. char ord;
  1988. if (!PyUnicode_Check(order)) {
  1989. PyErr_SetString(PyExc_TypeError,
  1990. "order must be a string");
  1991. return CHAR_MAX;
  1992. }
  1993. ascii_order = PyUnicode_AsASCIIString(order);
  1994. if (ascii_order == NULL) {
  1995. return CHAR_MAX;
  1996. }
  1997. ord = PyBytes_AS_STRING(ascii_order)[0];
  1998. Py_DECREF(ascii_order);
  1999. if (ord != 'C' && ord != 'F' && ord != 'A') {
  2000. PyErr_SetString(PyExc_ValueError,
  2001. "invalid order, must be C, F or A");
  2002. return CHAR_MAX;
  2003. }
  2004. return ord;
  2005. }
  2006. /* Get a contiguous memoryview. */
  2007. static PyObject *
  2008. get_contiguous(PyObject *self, PyObject *args)
  2009. {
  2010. PyObject *obj;
  2011. PyObject *buffertype;
  2012. PyObject *order;
  2013. long type;
  2014. char ord;
  2015. if (!PyArg_ParseTuple(args, "OOO", &obj, &buffertype, &order)) {
  2016. return NULL;
  2017. }
  2018. if (!PyLong_Check(buffertype)) {
  2019. PyErr_SetString(PyExc_TypeError,
  2020. "buffertype must be PyBUF_READ or PyBUF_WRITE");
  2021. return NULL;
  2022. }
  2023. type = PyLong_AsLong(buffertype);
  2024. if (type == -1 && PyErr_Occurred()) {
  2025. return NULL;
  2026. }
  2027. if (type != PyBUF_READ && type != PyBUF_WRITE) {
  2028. PyErr_SetString(PyExc_ValueError,
  2029. "invalid buffer type");
  2030. return NULL;
  2031. }
  2032. ord = get_ascii_order(order);
  2033. if (ord == CHAR_MAX)
  2034. return NULL;
  2035. return PyMemoryView_GetContiguous(obj, (int)type, ord);
  2036. }
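/* Illustrative sketch (editorial addition): get_contiguous() forwards to
   PyMemoryView_GetContiguous(), e.g.

       get_contiguous(b"abc", PyBUF_READ, 'C')    # read-only memoryview of b"abc"
       get_contiguous(b"abc", PyBUF_WRITE, 'C')   # error: bytes are not writable */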
  2037. /* PyBuffer_ToContiguous() */
  2038. static PyObject *
  2039. py_buffer_to_contiguous(PyObject *self, PyObject *args)
  2040. {
  2041. PyObject *obj;
  2042. PyObject *order;
  2043. PyObject *ret = NULL;
  2044. int flags;
  2045. char ord;
  2046. Py_buffer view;
  2047. char *buf = NULL;
  2048. if (!PyArg_ParseTuple(args, "OOi", &obj, &order, &flags)) {
  2049. return NULL;
  2050. }
  2051. if (PyObject_GetBuffer(obj, &view, flags) < 0) {
  2052. return NULL;
  2053. }
  2054. ord = get_ascii_order(order);
  2055. if (ord == CHAR_MAX) {
  2056. goto out;
  2057. }
  2058. buf = PyMem_Malloc(view.len);
  2059. if (buf == NULL) {
  2060. PyErr_NoMemory();
  2061. goto out;
  2062. }
  2063. if (PyBuffer_ToContiguous(buf, &view, view.len, ord) < 0) {
  2064. goto out;
  2065. }
  2066. ret = PyBytes_FromStringAndSize(buf, view.len);
  2067. out:
  2068. PyBuffer_Release(&view);
  2069. PyMem_XFree(buf);
  2070. return ret;
  2071. }
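/* Illustrative sketch (editorial addition): the result is the raw data
   repacked in the requested order, e.g.

       nd = ndarray(list(range(6)), shape=[2, 3])
       py_buffer_to_contiguous(nd, 'C', PyBUF_FULL_RO)   # b'\x00\x01\x02\x03\x04\x05'
       py_buffer_to_contiguous(nd, 'F', PyBUF_FULL_RO)   # b'\x00\x03\x01\x04\x02\x05' */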
  2072. static int
  2073. fmtcmp(const char *fmt1, const char *fmt2)
  2074. {
  2075. if (fmt1 == NULL) {
  2076. return fmt2 == NULL || strcmp(fmt2, "B") == 0;
  2077. }
  2078. if (fmt2 == NULL) {
  2079. return fmt1 == NULL || strcmp(fmt1, "B") == 0;
  2080. }
  2081. return strcmp(fmt1, fmt2) == 0;
  2082. }
  2083. static int
  2084. arraycmp(const Py_ssize_t *a1, const Py_ssize_t *a2, const Py_ssize_t *shape,
  2085. Py_ssize_t ndim)
  2086. {
  2087. Py_ssize_t i;
  2088. for (i = 0; i < ndim; i++) {
  2089. if (shape && shape[i] <= 1) {
2090. /* strides can differ if the dimension has fewer than two elements */
  2091. continue;
  2092. }
  2093. if (a1[i] != a2[i]) {
  2094. return 0;
  2095. }
  2096. }
  2097. return 1;
  2098. }
  2099. /* Compare two contiguous buffers for physical equality. */
  2100. static PyObject *
  2101. cmp_contig(PyObject *self, PyObject *args)
  2102. {
  2103. PyObject *b1, *b2; /* buffer objects */
  2104. Py_buffer v1, v2;
  2105. PyObject *ret;
  2106. int equal = 0;
  2107. if (!PyArg_ParseTuple(args, "OO", &b1, &b2)) {
  2108. return NULL;
  2109. }
  2110. if (PyObject_GetBuffer(b1, &v1, PyBUF_FULL_RO) < 0) {
  2111. PyErr_SetString(PyExc_TypeError,
  2112. "cmp_contig: first argument does not implement the buffer "
  2113. "protocol");
  2114. return NULL;
  2115. }
  2116. if (PyObject_GetBuffer(b2, &v2, PyBUF_FULL_RO) < 0) {
  2117. PyErr_SetString(PyExc_TypeError,
  2118. "cmp_contig: second argument does not implement the buffer "
  2119. "protocol");
  2120. PyBuffer_Release(&v1);
  2121. return NULL;
  2122. }
2123. if (!(PyBuffer_IsContiguous(&v1, 'C') && PyBuffer_IsContiguous(&v2, 'C')) &&
2124. !(PyBuffer_IsContiguous(&v1, 'F') && PyBuffer_IsContiguous(&v2, 'F'))) {
  2125. goto result;
  2126. }
2127. /* readonly is deliberately not compared: it may differ if a buffer was created from a non-contiguous exporter */
  2128. if (v1.len != v2.len ||
  2129. v1.itemsize != v2.itemsize ||
  2130. v1.ndim != v2.ndim ||
  2131. !fmtcmp(v1.format, v2.format) ||
  2132. !!v1.shape != !!v2.shape ||
  2133. !!v1.strides != !!v2.strides ||
  2134. !!v1.suboffsets != !!v2.suboffsets) {
  2135. goto result;
  2136. }
  2137. if ((v1.shape && !arraycmp(v1.shape, v2.shape, NULL, v1.ndim)) ||
  2138. (v1.strides && !arraycmp(v1.strides, v2.strides, v1.shape, v1.ndim)) ||
  2139. (v1.suboffsets && !arraycmp(v1.suboffsets, v2.suboffsets, NULL,
  2140. v1.ndim))) {
  2141. goto result;
  2142. }
  2143. if (memcmp((char *)v1.buf, (char *)v2.buf, v1.len) != 0) {
  2144. goto result;
  2145. }
  2146. equal = 1;
  2147. result:
  2148. PyBuffer_Release(&v1);
  2149. PyBuffer_Release(&v2);
  2150. ret = equal ? Py_True : Py_False;
  2151. Py_INCREF(ret);
  2152. return ret;
  2153. }
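/* Illustrative sketch (editorial addition): cmp_contig() compares the buffer
   structure (except readonly) plus the raw bytes of two contiguous buffers,
   e.g.

       cmp_contig(b"abc", bytearray(b"abc"))   # True: same layout and contents
       cmp_contig(b"abc", b"abd")              # False: contents differ
       nd = ndarray(list(range(6)), shape=[6])
       cmp_contig(nd, nd[::2])                 # False: the slice is not contiguous */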
  2154. static PyObject *
  2155. is_contiguous(PyObject *self, PyObject *args)
  2156. {
  2157. PyObject *obj;
  2158. PyObject *order;
  2159. PyObject *ret = NULL;
  2160. Py_buffer view, *base;
  2161. char ord;
  2162. if (!PyArg_ParseTuple(args, "OO", &obj, &order)) {
  2163. return NULL;
  2164. }
  2165. ord = get_ascii_order(order);
  2166. if (ord == CHAR_MAX) {
  2167. return NULL;
  2168. }
  2169. if (NDArray_Check(obj)) {
2170. /* Skip the buffer protocol and check the underlying base buffer (simple, strided, etc.) directly. */
  2171. base = &((NDArrayObject *)obj)->head->base;
  2172. ret = PyBuffer_IsContiguous(base, ord) ? Py_True : Py_False;
  2173. }
  2174. else {
  2175. if (PyObject_GetBuffer(obj, &view, PyBUF_FULL_RO) < 0) {
  2176. PyErr_SetString(PyExc_TypeError,
  2177. "is_contiguous: object does not implement the buffer "
  2178. "protocol");
  2179. return NULL;
  2180. }
  2181. ret = PyBuffer_IsContiguous(&view, ord) ? Py_True : Py_False;
  2182. PyBuffer_Release(&view);
  2183. }
  2184. Py_INCREF(ret);
  2185. return ret;
  2186. }
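/* Illustrative sketch (editorial addition):

       is_contiguous(b"abc", 'C')              # True
       nd = ndarray(list(range(6)), shape=[6])
       is_contiguous(nd, 'A')                  # True
       is_contiguous(nd[::2], 'C')             # False: strided slice */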
  2187. static Py_hash_t
  2188. ndarray_hash(PyObject *self)
  2189. {
  2190. const NDArrayObject *nd = (NDArrayObject *)self;
  2191. const Py_buffer *view = &nd->head->base;
  2192. PyObject *bytes;
  2193. Py_hash_t hash;
  2194. if (!view->readonly) {
  2195. PyErr_SetString(PyExc_ValueError,
  2196. "cannot hash writable ndarray object");
  2197. return -1;
  2198. }
  2199. if (view->obj != NULL && PyObject_Hash(view->obj) == -1) {
  2200. return -1;
  2201. }
  2202. bytes = ndarray_tobytes(self, NULL);
  2203. if (bytes == NULL) {
  2204. return -1;
  2205. }
  2206. hash = PyObject_Hash(bytes);
  2207. Py_DECREF(bytes);
  2208. return hash;
  2209. }
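/* Illustrative sketch (editorial addition): like memoryview, an ndarray is
   hashable only if it is read-only (and its exporting object, if any, is
   hashable); the hash is that of the equivalent bytes object, e.g.

       nd = ndarray(list(range(4)), shape=[4])   # read-only by default
       hash(nd) == hash(nd.tobytes())            # True
       hash(ndarray([1, 2], shape=[2], flags=ND_WRITABLE))   # ValueError: writable */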
  2210. static PyMethodDef ndarray_methods [] =
  2211. {
  2212. { "tolist", ndarray_tolist, METH_NOARGS, NULL },
  2213. { "tobytes", ndarray_tobytes, METH_NOARGS, NULL },
  2214. { "push", (PyCFunction)ndarray_push, METH_VARARGS|METH_KEYWORDS, NULL },
  2215. { "pop", ndarray_pop, METH_NOARGS, NULL },
  2216. { "add_suboffsets", ndarray_add_suboffsets, METH_NOARGS, NULL },
  2217. { "memoryview_from_buffer", ndarray_memoryview_from_buffer, METH_NOARGS, NULL },
  2218. {NULL}
  2219. };
  2220. static PyTypeObject NDArray_Type = {
  2221. PyVarObject_HEAD_INIT(NULL, 0)
  2222. "ndarray", /* Name of this type */
  2223. sizeof(NDArrayObject), /* Basic object size */
  2224. 0, /* Item size for varobject */
  2225. (destructor)ndarray_dealloc, /* tp_dealloc */
  2226. 0, /* tp_print */
  2227. 0, /* tp_getattr */
  2228. 0, /* tp_setattr */
  2229. 0, /* tp_compare */
  2230. 0, /* tp_repr */
  2231. 0, /* tp_as_number */
  2232. &ndarray_as_sequence, /* tp_as_sequence */
  2233. &ndarray_as_mapping, /* tp_as_mapping */
  2234. (hashfunc)ndarray_hash, /* tp_hash */
  2235. 0, /* tp_call */
  2236. 0, /* tp_str */
  2237. PyObject_GenericGetAttr, /* tp_getattro */
  2238. 0, /* tp_setattro */
  2239. &ndarray_as_buffer, /* tp_as_buffer */
  2240. Py_TPFLAGS_DEFAULT, /* tp_flags */
  2241. 0, /* tp_doc */
  2242. 0, /* tp_traverse */
  2243. 0, /* tp_clear */
  2244. 0, /* tp_richcompare */
  2245. 0, /* tp_weaklistoffset */
  2246. 0, /* tp_iter */
  2247. 0, /* tp_iternext */
  2248. ndarray_methods, /* tp_methods */
  2249. 0, /* tp_members */
  2250. ndarray_getset, /* tp_getset */
  2251. 0, /* tp_base */
  2252. 0, /* tp_dict */
  2253. 0, /* tp_descr_get */
  2254. 0, /* tp_descr_set */
  2255. 0, /* tp_dictoffset */
  2256. ndarray_init, /* tp_init */
  2257. 0, /* tp_alloc */
  2258. ndarray_new, /* tp_new */
  2259. };
  2260. /**************************************************************************/
  2261. /* StaticArray Object */
  2262. /**************************************************************************/
  2263. static PyTypeObject StaticArray_Type;
  2264. typedef struct {
  2265. PyObject_HEAD
  2266. int legacy_mode; /* if true, use the view.obj==NULL hack */
  2267. } StaticArrayObject;
  2268. static char static_mem[12] = {0,1,2,3,4,5,6,7,8,9,10,11};
  2269. static Py_ssize_t static_shape[1] = {12};
  2270. static Py_ssize_t static_strides[1] = {1};
  2271. static Py_buffer static_buffer = {
  2272. static_mem, /* buf */
  2273. NULL, /* obj */
  2274. 12, /* len */
  2275. 1, /* itemsize */
  2276. 1, /* readonly */
  2277. 1, /* ndim */
  2278. "B", /* format */
  2279. static_shape, /* shape */
  2280. static_strides, /* strides */
  2281. NULL, /* suboffsets */
  2282. NULL /* internal */
  2283. };
  2284. static PyObject *
  2285. staticarray_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
  2286. {
  2287. return (PyObject *)PyObject_New(StaticArrayObject, &StaticArray_Type);
  2288. }
  2289. static int
  2290. staticarray_init(PyObject *self, PyObject *args, PyObject *kwds)
  2291. {
  2292. StaticArrayObject *a = (StaticArrayObject *)self;
  2293. static char *kwlist[] = {
  2294. "legacy_mode", NULL
  2295. };
  2296. PyObject *legacy_mode = Py_False;
  2297. if (!PyArg_ParseTupleAndKeywords(args, kwds, "|O", kwlist, &legacy_mode))
  2298. return -1;
  2299. a->legacy_mode = (legacy_mode != Py_False);
  2300. return 0;
  2301. }
  2302. static void
  2303. staticarray_dealloc(StaticArrayObject *self)
  2304. {
  2305. PyObject_Del(self);
  2306. }
  2307. /* Return a buffer for a PyBUF_FULL_RO request. Flags are not checked,
  2308. which makes this object a non-compliant exporter! */
  2309. static int
  2310. staticarray_getbuf(StaticArrayObject *self, Py_buffer *view, int flags)
  2311. {
  2312. *view = static_buffer;
  2313. if (self->legacy_mode) {
  2314. view->obj = NULL; /* Don't use this in new code. */
  2315. }
  2316. else {
  2317. view->obj = (PyObject *)self;
  2318. Py_INCREF(view->obj);
  2319. }
  2320. return 0;
  2321. }
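/* Illustrative sketch (editorial addition): staticarray exports the 12 bytes
   above; legacy_mode=True leaves view.obj == NULL so that consumers which
   still special-case a missing obj can be tested, e.g.

       m = memoryview(staticarray())
       m.tolist()    # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
       memoryview(staticarray(legacy_mode=True))   # exercises the view.obj == NULL path */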
  2322. static PyBufferProcs staticarray_as_buffer = {
  2323. (getbufferproc)staticarray_getbuf, /* bf_getbuffer */
  2324. NULL, /* bf_releasebuffer */
  2325. };
  2326. static PyTypeObject StaticArray_Type = {
  2327. PyVarObject_HEAD_INIT(NULL, 0)
  2328. "staticarray", /* Name of this type */
  2329. sizeof(StaticArrayObject), /* Basic object size */
  2330. 0, /* Item size for varobject */
  2331. (destructor)staticarray_dealloc, /* tp_dealloc */
  2332. 0, /* tp_print */
  2333. 0, /* tp_getattr */
  2334. 0, /* tp_setattr */
  2335. 0, /* tp_compare */
  2336. 0, /* tp_repr */
  2337. 0, /* tp_as_number */
  2338. 0, /* tp_as_sequence */
  2339. 0, /* tp_as_mapping */
  2340. 0, /* tp_hash */
  2341. 0, /* tp_call */
  2342. 0, /* tp_str */
  2343. 0, /* tp_getattro */
  2344. 0, /* tp_setattro */
  2345. &staticarray_as_buffer, /* tp_as_buffer */
  2346. Py_TPFLAGS_DEFAULT, /* tp_flags */
  2347. 0, /* tp_doc */
  2348. 0, /* tp_traverse */
  2349. 0, /* tp_clear */
  2350. 0, /* tp_richcompare */
  2351. 0, /* tp_weaklistoffset */
  2352. 0, /* tp_iter */
  2353. 0, /* tp_iternext */
  2354. 0, /* tp_methods */
  2355. 0, /* tp_members */
  2356. 0, /* tp_getset */
  2357. 0, /* tp_base */
  2358. 0, /* tp_dict */
  2359. 0, /* tp_descr_get */
  2360. 0, /* tp_descr_set */
  2361. 0, /* tp_dictoffset */
  2362. staticarray_init, /* tp_init */
  2363. 0, /* tp_alloc */
  2364. staticarray_new, /* tp_new */
  2365. };
  2366. static struct PyMethodDef _testbuffer_functions[] = {
  2367. {"slice_indices", slice_indices, METH_VARARGS, NULL},
  2368. {"get_pointer", get_pointer, METH_VARARGS, NULL},
  2369. {"get_sizeof_void_p", (PyCFunction)get_sizeof_void_p, METH_NOARGS, NULL},
  2370. {"get_contiguous", get_contiguous, METH_VARARGS, NULL},
  2371. {"py_buffer_to_contiguous", py_buffer_to_contiguous, METH_VARARGS, NULL},
  2372. {"is_contiguous", is_contiguous, METH_VARARGS, NULL},
  2373. {"cmp_contig", cmp_contig, METH_VARARGS, NULL},
  2374. {NULL, NULL}
  2375. };
  2376. static struct PyModuleDef _testbuffermodule = {
  2377. PyModuleDef_HEAD_INIT,
  2378. "_testbuffer",
  2379. NULL,
  2380. -1,
  2381. _testbuffer_functions,
  2382. NULL,
  2383. NULL,
  2384. NULL,
  2385. NULL
  2386. };
  2387. PyMODINIT_FUNC
  2388. PyInit__testbuffer(void)
  2389. {
  2390. PyObject *m;
  2391. m = PyModule_Create(&_testbuffermodule);
  2392. if (m == NULL)
  2393. return NULL;
  2394. Py_TYPE(&NDArray_Type) = &PyType_Type;
  2395. Py_INCREF(&NDArray_Type);
  2396. PyModule_AddObject(m, "ndarray", (PyObject *)&NDArray_Type);
  2397. Py_TYPE(&StaticArray_Type) = &PyType_Type;
  2398. Py_INCREF(&StaticArray_Type);
  2399. PyModule_AddObject(m, "staticarray", (PyObject *)&StaticArray_Type);
  2400. structmodule = PyImport_ImportModule("struct");
  2401. if (structmodule == NULL)
  2402. return NULL;
  2403. Struct = PyObject_GetAttrString(structmodule, "Struct");
  2404. calcsize = PyObject_GetAttrString(structmodule, "calcsize");
  2405. if (Struct == NULL || calcsize == NULL)
  2406. return NULL;
  2407. simple_format = PyUnicode_FromString(simple_fmt);
  2408. if (simple_format == NULL)
  2409. return NULL;
  2410. PyModule_AddIntMacro(m, ND_MAX_NDIM);
  2411. PyModule_AddIntMacro(m, ND_VAREXPORT);
  2412. PyModule_AddIntMacro(m, ND_WRITABLE);
  2413. PyModule_AddIntMacro(m, ND_FORTRAN);
  2414. PyModule_AddIntMacro(m, ND_SCALAR);
  2415. PyModule_AddIntMacro(m, ND_PIL);
  2416. PyModule_AddIntMacro(m, ND_GETBUF_FAIL);
  2417. PyModule_AddIntMacro(m, ND_GETBUF_UNDEFINED);
  2418. PyModule_AddIntMacro(m, ND_REDIRECT);
  2419. PyModule_AddIntMacro(m, PyBUF_SIMPLE);
  2420. PyModule_AddIntMacro(m, PyBUF_WRITABLE);
  2421. PyModule_AddIntMacro(m, PyBUF_FORMAT);
  2422. PyModule_AddIntMacro(m, PyBUF_ND);
  2423. PyModule_AddIntMacro(m, PyBUF_STRIDES);
  2424. PyModule_AddIntMacro(m, PyBUF_INDIRECT);
  2425. PyModule_AddIntMacro(m, PyBUF_C_CONTIGUOUS);
  2426. PyModule_AddIntMacro(m, PyBUF_F_CONTIGUOUS);
  2427. PyModule_AddIntMacro(m, PyBUF_ANY_CONTIGUOUS);
  2428. PyModule_AddIntMacro(m, PyBUF_FULL);
  2429. PyModule_AddIntMacro(m, PyBUF_FULL_RO);
  2430. PyModule_AddIntMacro(m, PyBUF_RECORDS);
  2431. PyModule_AddIntMacro(m, PyBUF_RECORDS_RO);
  2432. PyModule_AddIntMacro(m, PyBUF_STRIDED);
  2433. PyModule_AddIntMacro(m, PyBUF_STRIDED_RO);
  2434. PyModule_AddIntMacro(m, PyBUF_CONTIG);
  2435. PyModule_AddIntMacro(m, PyBUF_CONTIG_RO);
  2436. PyModule_AddIntMacro(m, PyBUF_READ);
  2437. PyModule_AddIntMacro(m, PyBUF_WRITE);
  2438. return m;
  2439. }