You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

2894 lines
82 KiB

/* C Extension module to test all aspects of PEP-3118.
   Written by Stefan Krah. */

#define PY_SSIZE_T_CLEAN
#include "Python.h"

/* struct module */
static PyObject *structmodule = NULL;
static PyObject *Struct = NULL;
static PyObject *calcsize = NULL;

/* cache simple format string */
static const char *simple_fmt = "B";
static PyObject *simple_format = NULL;

/* A NULL format is treated as the simple unsigned-byte format "B". */
#define SIMPLE_FORMAT(fmt) (fmt == NULL || strcmp(fmt, "B") == 0)
#define FIX_FORMAT(fmt) (fmt == NULL ? "B" : fmt)


/**************************************************************************/
/*                           NDArray Object                               */
/**************************************************************************/

static PyTypeObject NDArray_Type;
#define NDArray_Check(v) Py_IS_TYPE(v, &NDArray_Type)

/* Argument check for initializer arguments; #v names the offending
   argument in the error message. NOTE: the blank line after the macro is
   required -- the final line continuation must not splice into the next
   directive. */
#define CHECK_LIST_OR_TUPLE(v) \
    if (!PyList_Check(v) && !PyTuple_Check(v)) { \
        PyErr_SetString(PyExc_TypeError,         \
            #v " must be a list or a tuple");    \
        return NULL;                             \
    }                                            \

#define PyMem_XFree(v) \
    do { if (v) PyMem_Free(v); } while (0)

/* Maximum number of dimensions. */
#define ND_MAX_NDIM (2 * PyBUF_MAX_NDIM)

/* Check for the presence of suboffsets in the first dimension. */
#define HAVE_PTR(suboffsets) (suboffsets && suboffsets[0] >= 0)
/* Adjust ptr if suboffsets are present. */
#define ADJUST_PTR(ptr, suboffsets) \
    (HAVE_PTR(suboffsets) ? *((char**)ptr) + suboffsets[0] : ptr)

/* Default: NumPy style (strides), read-only, no var-export, C-style layout */
#define ND_DEFAULT          0x000
/* User configurable flags for the ndarray */
#define ND_VAREXPORT        0x001   /* change layout while buffers are exported */
/* User configurable flags for each base buffer */
#define ND_WRITABLE         0x002   /* mark base buffer as writable */
#define ND_FORTRAN          0x004   /* Fortran contiguous layout */
#define ND_SCALAR           0x008   /* scalar: ndim = 0 */
#define ND_PIL              0x010   /* convert to PIL-style array (suboffsets) */
#define ND_REDIRECT         0x020   /* redirect buffer requests */
#define ND_GETBUF_FAIL      0x040   /* trigger getbuffer failure */
#define ND_GETBUF_UNDEFINED 0x080   /* undefined view.obj */
/* Internal flags for the base buffer */
#define ND_C                0x100   /* C contiguous layout (default) */
#define ND_OWN_ARRAYS       0x200   /* consumer owns arrays */

/* ndarray properties */
#define ND_IS_CONSUMER(nd) \
    (((NDArrayObject *)nd)->head == &((NDArrayObject *)nd)->staticbuf)

/* ndbuf->flags properties (a scalar counts as contiguous in any order) */
#define ND_C_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_C)))
#define ND_FORTRAN_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_FORTRAN)))
#define ND_ANY_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_C|ND_FORTRAN)))

/* getbuffer() requests: tests against compound PyBUF_* request masks */
#define REQ_INDIRECT(flags) ((flags&PyBUF_INDIRECT) == PyBUF_INDIRECT)
#define REQ_C_CONTIGUOUS(flags) ((flags&PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS)
#define REQ_F_CONTIGUOUS(flags) ((flags&PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS)
#define REQ_ANY_CONTIGUOUS(flags) ((flags&PyBUF_ANY_CONTIGUOUS) == PyBUF_ANY_CONTIGUOUS)
#define REQ_STRIDES(flags) ((flags&PyBUF_STRIDES) == PyBUF_STRIDES)
#define REQ_SHAPE(flags) ((flags&PyBUF_ND) == PyBUF_ND)
#define REQ_WRITABLE(flags) (flags&PyBUF_WRITABLE)
#define REQ_FORMAT(flags) (flags&PyBUF_FORMAT)


/* Single node of a list of base buffers. The list is needed to implement
   changes in memory layout while exported buffers are active. */
static PyTypeObject NDArray_Type;

struct ndbuf;
typedef struct ndbuf {
    struct ndbuf *next;
    struct ndbuf *prev;
    Py_ssize_t len;     /* length of data */
    Py_ssize_t offset;  /* start of the array relative to data */
    char *data;         /* raw data */
    int flags;          /* capabilities of the base buffer */
    Py_ssize_t exports; /* number of exports */
    Py_buffer base;     /* base buffer */
} ndbuf_t;

typedef struct {
    PyObject_HEAD
    int flags;          /* ndarray flags */
    ndbuf_t staticbuf;  /* static buffer for re-exporting mode */
    ndbuf_t *head;      /* currently active base buffer */
} NDArrayObject;
  85. static ndbuf_t *
  86. ndbuf_new(Py_ssize_t nitems, Py_ssize_t itemsize, Py_ssize_t offset, int flags)
  87. {
  88. ndbuf_t *ndbuf;
  89. Py_buffer *base;
  90. Py_ssize_t len;
  91. len = nitems * itemsize;
  92. if (offset % itemsize) {
  93. PyErr_SetString(PyExc_ValueError,
  94. "offset must be a multiple of itemsize");
  95. return NULL;
  96. }
  97. if (offset < 0 || offset+itemsize > len) {
  98. PyErr_SetString(PyExc_ValueError, "offset out of bounds");
  99. return NULL;
  100. }
  101. ndbuf = PyMem_Malloc(sizeof *ndbuf);
  102. if (ndbuf == NULL) {
  103. PyErr_NoMemory();
  104. return NULL;
  105. }
  106. ndbuf->next = NULL;
  107. ndbuf->prev = NULL;
  108. ndbuf->len = len;
  109. ndbuf->offset= offset;
  110. ndbuf->data = PyMem_Malloc(len);
  111. if (ndbuf->data == NULL) {
  112. PyErr_NoMemory();
  113. PyMem_Free(ndbuf);
  114. return NULL;
  115. }
  116. ndbuf->flags = flags;
  117. ndbuf->exports = 0;
  118. base = &ndbuf->base;
  119. base->obj = NULL;
  120. base->buf = ndbuf->data;
  121. base->len = len;
  122. base->itemsize = 1;
  123. base->readonly = 0;
  124. base->format = NULL;
  125. base->ndim = 1;
  126. base->shape = NULL;
  127. base->strides = NULL;
  128. base->suboffsets = NULL;
  129. base->internal = ndbuf;
  130. return ndbuf;
  131. }
  132. static void
  133. ndbuf_free(ndbuf_t *ndbuf)
  134. {
  135. Py_buffer *base = &ndbuf->base;
  136. PyMem_XFree(ndbuf->data);
  137. PyMem_XFree(base->format);
  138. PyMem_XFree(base->shape);
  139. PyMem_XFree(base->strides);
  140. PyMem_XFree(base->suboffsets);
  141. PyMem_Free(ndbuf);
  142. }
  143. static void
  144. ndbuf_push(NDArrayObject *nd, ndbuf_t *elt)
  145. {
  146. elt->next = nd->head;
  147. if (nd->head) nd->head->prev = elt;
  148. nd->head = elt;
  149. elt->prev = NULL;
  150. }
  151. static void
  152. ndbuf_delete(NDArrayObject *nd, ndbuf_t *elt)
  153. {
  154. if (elt->prev)
  155. elt->prev->next = elt->next;
  156. else
  157. nd->head = elt->next;
  158. if (elt->next)
  159. elt->next->prev = elt->prev;
  160. ndbuf_free(elt);
  161. }
/* Remove and free the most recently pushed base buffer. The caller must
   ensure that nd->head is non-NULL. */
static void
ndbuf_pop(NDArrayObject *nd)
{
    ndbuf_delete(nd, nd->head);
}
  167. static PyObject *
  168. ndarray_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
  169. {
  170. NDArrayObject *nd;
  171. nd = PyObject_New(NDArrayObject, &NDArray_Type);
  172. if (nd == NULL)
  173. return NULL;
  174. nd->flags = 0;
  175. nd->head = NULL;
  176. return (PyObject *)nd;
  177. }
/* tp_dealloc: release either the single consumer buffer or the whole
   list of exporter base buffers, then free the object itself. */
static void
ndarray_dealloc(NDArrayObject *self)
{
    if (self->head) {
        if (ND_IS_CONSUMER(self)) {
            /* Consumer mode: head is the embedded staticbuf, whose base
               was filled in by PyObject_GetBuffer(). */
            Py_buffer *base = &self->head->base;
            if (self->head->flags & ND_OWN_ARRAYS) {
                /* shape/strides/suboffsets were allocated by this module
                   rather than by the exporter, so free them here. */
                PyMem_XFree(base->shape);
                PyMem_XFree(base->strides);
                PyMem_XFree(base->suboffsets);
            }
            PyBuffer_Release(base);
        }
        else {
            /* Exporter mode: free every base buffer in the list. */
            while (self->head)
                ndbuf_pop(self);
        }
    }
    PyObject_Free(self);
}
  198. static int
  199. ndarray_init_staticbuf(PyObject *exporter, NDArrayObject *nd, int flags)
  200. {
  201. Py_buffer *base = &nd->staticbuf.base;
  202. if (PyObject_GetBuffer(exporter, base, flags) < 0)
  203. return -1;
  204. nd->head = &nd->staticbuf;
  205. nd->head->next = NULL;
  206. nd->head->prev = NULL;
  207. nd->head->len = -1;
  208. nd->head->offset = -1;
  209. nd->head->data = NULL;
  210. nd->head->flags = base->readonly ? 0 : ND_WRITABLE;
  211. nd->head->exports = 0;
  212. return 0;
  213. }
  214. static void
  215. init_flags(ndbuf_t *ndbuf)
  216. {
  217. if (ndbuf->base.ndim == 0)
  218. ndbuf->flags |= ND_SCALAR;
  219. if (ndbuf->base.suboffsets)
  220. ndbuf->flags |= ND_PIL;
  221. if (PyBuffer_IsContiguous(&ndbuf->base, 'C'))
  222. ndbuf->flags |= ND_C;
  223. if (PyBuffer_IsContiguous(&ndbuf->base, 'F'))
  224. ndbuf->flags |= ND_FORTRAN;
  225. }
/****************************************************************************/
/*                          Buffer/List conversions                         */
/****************************************************************************/

static Py_ssize_t *strides_from_shape(const ndbuf_t *, int flags);

/* Get number of members in a struct: see issue #12740 */
typedef struct {
    PyObject_HEAD
    Py_ssize_t s_size;  /* struct size in bytes */
    Py_ssize_t s_len;   /* number of members */
} PyPartialStructObject;

/* Return the number of members of a struct.Struct instance by peeking at
   its object layout; there is no public API for this. */
static Py_ssize_t
get_nmemb(PyObject *s)
{
    return ((PyPartialStructObject *)s)->s_len;
}
/* Pack all items into the buffer of 'obj'. The 'format' parameter must be
   in struct module syntax. For standard C types, a single item is an integer.
   For compound types, a single item is a tuple of integers. Returns 0 on
   success, -1 with an exception set on error. */
static int
pack_from_list(PyObject *obj, PyObject *items, PyObject *format,
               Py_ssize_t itemsize)
{
    PyObject *structobj, *pack_into;
    PyObject *args, *offset;
    PyObject *item, *tmp;
    Py_ssize_t nitems; /* number of items */
    Py_ssize_t nmemb;  /* number of members in a single item */
    Py_ssize_t i, j;
    int ret = 0;

    assert(PyObject_CheckBuffer(obj));
    assert(PyList_Check(items) || PyTuple_Check(items));

    structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
    if (structobj == NULL)
        return -1;

    nitems = PySequence_Fast_GET_SIZE(items);
    nmemb = get_nmemb(structobj);
    assert(nmemb >= 1);

    pack_into = PyObject_GetAttrString(structobj, "pack_into");
    if (pack_into == NULL) {
        Py_DECREF(structobj);
        return -1;
    }

    /* Reusable argument vector for pack_into(obj, offset, v1, ..., vnmemb).
       nmemb >= 1 */
    args = PyTuple_New(2 + nmemb);
    if (args == NULL) {
        Py_DECREF(pack_into);
        Py_DECREF(structobj);
        return -1;
    }

    offset = NULL;
    for (i = 0; i < nitems; i++) {
        /* Loop invariant: args[j] are borrowed references or NULL.
           Ownership stays with the original containers; the compensation
           increfs below keep Py_DECREF(args) from stealing them. */
        PyTuple_SET_ITEM(args, 0, obj);
        for (j = 1; j < 2+nmemb; j++)
            PyTuple_SET_ITEM(args, j, NULL);

        /* args[1]: byte offset of item i; release the previous offset
           object first. */
        Py_XDECREF(offset);
        offset = PyLong_FromSsize_t(i*itemsize);
        if (offset == NULL) {
            ret = -1;
            break;
        }
        PyTuple_SET_ITEM(args, 1, offset);

        item = PySequence_Fast_GET_ITEM(items, i);
        if ((PyBytes_Check(item) || PyLong_Check(item) ||
             PyFloat_Check(item)) && nmemb == 1) {
            /* Simple format: the item itself is the single value. */
            PyTuple_SET_ITEM(args, 2, item);
        }
        else if ((PyList_Check(item) || PyTuple_Check(item)) &&
                 PySequence_Length(item) == nmemb) {
            /* Compound format: spread the item's members over args[2:]. */
            for (j = 0; j < nmemb; j++) {
                tmp = PySequence_Fast_GET_ITEM(item, j);
                PyTuple_SET_ITEM(args, 2+j, tmp);
            }
        }
        else {
            PyErr_SetString(PyExc_ValueError,
                "mismatch between initializer element and format string");
            ret = -1;
            break;
        }

        tmp = PyObject_CallObject(pack_into, args);
        if (tmp == NULL) {
            ret = -1;
            break;
        }
        Py_DECREF(tmp);
    }

    /* Compensate for the borrowed references still held by 'args' so
       that deallocating the tuple does not drop references we do not
       own. */
    Py_INCREF(obj); /* args[0] */
    /* args[1]: offset is either NULL or should be dealloc'd */
    for (i = 2; i < 2+nmemb; i++) {
        tmp = PyTuple_GET_ITEM(args, i);
        Py_XINCREF(tmp);
    }
    Py_DECREF(args);
    Py_DECREF(pack_into);
    Py_DECREF(structobj);
    return ret;
}
/* Pack a single element at 'ptr' from 'item', using the struct-module
   format 'fmt'. A NULL format means a single unsigned byte ("B").
   Returns 0 on success, -1 with an exception set on error. */
static int
pack_single(char *ptr, PyObject *item, const char *fmt, Py_ssize_t itemsize)
{
    PyObject *structobj = NULL, *pack_into = NULL, *args = NULL;
    PyObject *format = NULL, *mview = NULL, *zero = NULL;
    Py_ssize_t i, nmemb;
    int ret = -1;
    PyObject *x;

    if (fmt == NULL) fmt = "B";

    format = PyUnicode_FromString(fmt);
    if (format == NULL)
        goto out;

    structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
    if (structobj == NULL)
        goto out;

    nmemb = get_nmemb(structobj);
    assert(nmemb >= 1);

    /* Writable view over the destination item. */
    mview = PyMemoryView_FromMemory(ptr, itemsize, PyBUF_WRITE);
    if (mview == NULL)
        goto out;

    zero = PyLong_FromLong(0);
    if (zero == NULL)
        goto out;

    pack_into = PyObject_GetAttrString(structobj, "pack_into");
    if (pack_into == NULL)
        goto out;

    /* Argument vector for pack_into(mview, 0, v1, ..., vnmemb). The tuple
       holds borrowed references only; see the compensation loop below. */
    args = PyTuple_New(2+nmemb);
    if (args == NULL)
        goto out;

    PyTuple_SET_ITEM(args, 0, mview);
    PyTuple_SET_ITEM(args, 1, zero);

    if ((PyBytes_Check(item) || PyLong_Check(item) ||
         PyFloat_Check(item)) && nmemb == 1) {
        /* Simple format: the item itself is the single value. */
        PyTuple_SET_ITEM(args, 2, item);
    }
    else if ((PyList_Check(item) || PyTuple_Check(item)) &&
             PySequence_Length(item) == nmemb) {
        /* Compound format: spread the item's members over args[2:]. */
        for (i = 0; i < nmemb; i++) {
            x = PySequence_Fast_GET_ITEM(item, i);
            PyTuple_SET_ITEM(args, 2+i, x);
        }
    }
    else {
        PyErr_SetString(PyExc_ValueError,
            "mismatch between initializer element and format string");
        goto args_out;
    }

    x = PyObject_CallObject(pack_into, args);
    if (x != NULL) {
        Py_DECREF(x);
        ret = 0;
    }

args_out:
    /* Re-incref the borrowed references before dropping 'args' so the
       tuple deallocation does not steal them from their owners. */
    for (i = 0; i < 2+nmemb; i++)
        Py_XINCREF(PyTuple_GET_ITEM(args, i));
    Py_XDECREF(args);
out:
    Py_XDECREF(pack_into);
    Py_XDECREF(zero);
    Py_XDECREF(mview);
    Py_XDECREF(structobj);
    Py_XDECREF(format);
    return ret;
}
/* Recursively copy 'sptr' to 'dptr'; both sides share 'shape', 'ndim' and
   'itemsize' but have their own strides/suboffsets. 'mem' must point to
   scratch space for one innermost row whenever either innermost dimension
   is not a plain contiguous item array; staging through 'mem' keeps the
   copy correct for overlapping buffers. */
static void
copy_rec(const Py_ssize_t *shape, Py_ssize_t ndim, Py_ssize_t itemsize,
         char *dptr, const Py_ssize_t *dstrides, const Py_ssize_t *dsuboffsets,
         char *sptr, const Py_ssize_t *sstrides, const Py_ssize_t *ssuboffsets,
         char *mem)
{
    Py_ssize_t i;

    assert(ndim >= 1);

    if (ndim == 1) {
        if (!HAVE_PTR(dsuboffsets) && !HAVE_PTR(ssuboffsets) &&
            dstrides[0] == itemsize && sstrides[0] == itemsize) {
            /* Both rows are contiguous: memmove() handles overlap. */
            memmove(dptr, sptr, shape[0] * itemsize);
        }
        else {
            char *p;
            assert(mem != NULL);
            /* Stage the complete source row in 'mem' first ... */
            for (i=0, p=mem; i<shape[0]; p+=itemsize, sptr+=sstrides[0], i++) {
                char *xsptr = ADJUST_PTR(sptr, ssuboffsets);
                memcpy(p, xsptr, itemsize);
            }
            /* ... then write it out, so overlapping src/dest are safe. */
            for (i=0, p=mem; i<shape[0]; p+=itemsize, dptr+=dstrides[0], i++) {
                char *xdptr = ADJUST_PTR(dptr, dsuboffsets);
                memcpy(xdptr, p, itemsize);
            }
        }
        return;
    }

    /* Recurse over the first dimension. */
    for (i = 0; i < shape[0]; dptr+=dstrides[0], sptr+=sstrides[0], i++) {
        char *xdptr = ADJUST_PTR(dptr, dsuboffsets);
        char *xsptr = ADJUST_PTR(sptr, ssuboffsets);

        copy_rec(shape+1, ndim-1, itemsize,
                 xdptr, dstrides+1, dsuboffsets ? dsuboffsets+1 : NULL,
                 xsptr, sstrides+1, ssuboffsets ? ssuboffsets+1 : NULL,
                 mem);
    }
}
  425. static int
  426. cmp_structure(Py_buffer *dest, Py_buffer *src)
  427. {
  428. Py_ssize_t i;
  429. if (strcmp(FIX_FORMAT(dest->format), FIX_FORMAT(src->format)) != 0 ||
  430. dest->itemsize != src->itemsize ||
  431. dest->ndim != src->ndim)
  432. return -1;
  433. for (i = 0; i < dest->ndim; i++) {
  434. if (dest->shape[i] != src->shape[i])
  435. return -1;
  436. if (dest->shape[i] == 0)
  437. break;
  438. }
  439. return 0;
  440. }
  441. /* Copy src to dest. Both buffers must have the same format, itemsize,
  442. ndim and shape. Copying is atomic, the function never fails with
  443. a partial copy. */
  444. static int
  445. copy_buffer(Py_buffer *dest, Py_buffer *src)
  446. {
  447. char *mem = NULL;
  448. assert(dest->ndim > 0);
  449. if (cmp_structure(dest, src) < 0) {
  450. PyErr_SetString(PyExc_ValueError,
  451. "ndarray assignment: lvalue and rvalue have different structures");
  452. return -1;
  453. }
  454. if ((dest->suboffsets && dest->suboffsets[dest->ndim-1] >= 0) ||
  455. (src->suboffsets && src->suboffsets[src->ndim-1] >= 0) ||
  456. dest->strides[dest->ndim-1] != dest->itemsize ||
  457. src->strides[src->ndim-1] != src->itemsize) {
  458. mem = PyMem_Malloc(dest->shape[dest->ndim-1] * dest->itemsize);
  459. if (mem == NULL) {
  460. PyErr_NoMemory();
  461. return -1;
  462. }
  463. }
  464. copy_rec(dest->shape, dest->ndim, dest->itemsize,
  465. dest->buf, dest->strides, dest->suboffsets,
  466. src->buf, src->strides, src->suboffsets,
  467. mem);
  468. PyMem_XFree(mem);
  469. return 0;
  470. }
  471. /* Unpack single element */
  472. static PyObject *
  473. unpack_single(char *ptr, const char *fmt, Py_ssize_t itemsize)
  474. {
  475. PyObject *x, *unpack_from, *mview;
  476. if (fmt == NULL) {
  477. fmt = "B";
  478. itemsize = 1;
  479. }
  480. unpack_from = PyObject_GetAttrString(structmodule, "unpack_from");
  481. if (unpack_from == NULL)
  482. return NULL;
  483. mview = PyMemoryView_FromMemory(ptr, itemsize, PyBUF_READ);
  484. if (mview == NULL) {
  485. Py_DECREF(unpack_from);
  486. return NULL;
  487. }
  488. x = PyObject_CallFunction(unpack_from, "sO", fmt, mview);
  489. Py_DECREF(unpack_from);
  490. Py_DECREF(mview);
  491. if (x == NULL)
  492. return NULL;
  493. if (PyTuple_GET_SIZE(x) == 1) {
  494. PyObject *tmp = PyTuple_GET_ITEM(x, 0);
  495. Py_INCREF(tmp);
  496. Py_DECREF(x);
  497. return tmp;
  498. }
  499. return x;
  500. }
/* Unpack a multi-dimensional matrix into a nested list. Return a scalar
   for ndim = 0. 'mview' is a writable memoryview over the scratch buffer
   'item': each element is copied into 'item' and decoded via
   unpack_from(mview), so the same view is reused for every element. */
static PyObject *
unpack_rec(PyObject *unpack_from, char *ptr, PyObject *mview, char *item,
           const Py_ssize_t *shape, const Py_ssize_t *strides,
           const Py_ssize_t *suboffsets, Py_ssize_t ndim, Py_ssize_t itemsize)
{
    PyObject *lst, *x;
    Py_ssize_t i;

    assert(ndim >= 0);
    assert(shape != NULL);
    assert(strides != NULL);

    if (ndim == 0) {
        /* Base case: decode one item through the scratch buffer. */
        memcpy(item, ptr, itemsize);
        x = PyObject_CallFunctionObjArgs(unpack_from, mview, NULL);
        if (x == NULL)
            return NULL;
        /* unpack_from() returns a tuple; flatten 1-tuples. */
        if (PyTuple_GET_SIZE(x) == 1) {
            PyObject *tmp = PyTuple_GET_ITEM(x, 0);
            Py_INCREF(tmp);
            Py_DECREF(x);
            return tmp;
        }
        return x;
    }

    lst = PyList_New(shape[0]);
    if (lst == NULL)
        return NULL;

    /* Recurse over the first dimension. */
    for (i = 0; i < shape[0]; ptr+=strides[0], i++) {
        char *nextptr = ADJUST_PTR(ptr, suboffsets);

        x = unpack_rec(unpack_from, nextptr, mview, item,
                       shape+1, strides+1, suboffsets ? suboffsets+1 : NULL,
                       ndim-1, itemsize);
        if (x == NULL) {
            Py_DECREF(lst);
            return NULL;
        }
        PyList_SET_ITEM(lst, i, x);
    }
    return lst;
}
  542. static PyObject *
  543. ndarray_as_list(NDArrayObject *nd)
  544. {
  545. PyObject *structobj = NULL, *unpack_from = NULL;
  546. PyObject *lst = NULL, *mview = NULL;
  547. Py_buffer *base = &nd->head->base;
  548. Py_ssize_t *shape = base->shape;
  549. Py_ssize_t *strides = base->strides;
  550. Py_ssize_t simple_shape[1];
  551. Py_ssize_t simple_strides[1];
  552. char *item = NULL;
  553. PyObject *format;
  554. char *fmt = base->format;
  555. base = &nd->head->base;
  556. if (fmt == NULL) {
  557. PyErr_SetString(PyExc_ValueError,
  558. "ndarray: tolist() does not support format=NULL, use "
  559. "tobytes()");
  560. return NULL;
  561. }
  562. if (shape == NULL) {
  563. assert(ND_C_CONTIGUOUS(nd->head->flags));
  564. assert(base->strides == NULL);
  565. assert(base->ndim <= 1);
  566. shape = simple_shape;
  567. shape[0] = base->len;
  568. strides = simple_strides;
  569. strides[0] = base->itemsize;
  570. }
  571. else if (strides == NULL) {
  572. assert(ND_C_CONTIGUOUS(nd->head->flags));
  573. strides = strides_from_shape(nd->head, 0);
  574. if (strides == NULL)
  575. return NULL;
  576. }
  577. format = PyUnicode_FromString(fmt);
  578. if (format == NULL)
  579. goto out;
  580. structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
  581. Py_DECREF(format);
  582. if (structobj == NULL)
  583. goto out;
  584. unpack_from = PyObject_GetAttrString(structobj, "unpack_from");
  585. if (unpack_from == NULL)
  586. goto out;
  587. item = PyMem_Malloc(base->itemsize);
  588. if (item == NULL) {
  589. PyErr_NoMemory();
  590. goto out;
  591. }
  592. mview = PyMemoryView_FromMemory(item, base->itemsize, PyBUF_WRITE);
  593. if (mview == NULL)
  594. goto out;
  595. lst = unpack_rec(unpack_from, base->buf, mview, item,
  596. shape, strides, base->suboffsets,
  597. base->ndim, base->itemsize);
  598. out:
  599. Py_XDECREF(mview);
  600. PyMem_XFree(item);
  601. Py_XDECREF(unpack_from);
  602. Py_XDECREF(structobj);
  603. if (strides != base->strides && strides != simple_strides)
  604. PyMem_XFree(strides);
  605. return lst;
  606. }
  607. /****************************************************************************/
  608. /* Initialize ndbuf */
  609. /****************************************************************************/
  610. /*
  611. State of a new ndbuf during initialization. 'OK' means that initialization
  612. is complete. 'PTR' means that a pointer has been initialized, but the
  613. state of the memory is still undefined and ndbuf->offset is disregarded.
  614. +-----------------+-----------+-------------+----------------+
  615. | | ndbuf_new | init_simple | init_structure |
  616. +-----------------+-----------+-------------+----------------+
  617. | next | OK (NULL) | OK | OK |
  618. +-----------------+-----------+-------------+----------------+
  619. | prev | OK (NULL) | OK | OK |
  620. +-----------------+-----------+-------------+----------------+
  621. | len | OK | OK | OK |
  622. +-----------------+-----------+-------------+----------------+
  623. | offset | OK | OK | OK |
  624. +-----------------+-----------+-------------+----------------+
  625. | data | PTR | OK | OK |
  626. +-----------------+-----------+-------------+----------------+
  627. | flags | user | user | OK |
  628. +-----------------+-----------+-------------+----------------+
  629. | exports | OK (0) | OK | OK |
  630. +-----------------+-----------+-------------+----------------+
  631. | base.obj | OK (NULL) | OK | OK |
  632. +-----------------+-----------+-------------+----------------+
  633. | base.buf | PTR | PTR | OK |
  634. +-----------------+-----------+-------------+----------------+
  635. | base.len | len(data) | len(data) | OK |
  636. +-----------------+-----------+-------------+----------------+
  637. | base.itemsize | 1 | OK | OK |
  638. +-----------------+-----------+-------------+----------------+
  639. | base.readonly | 0 | OK | OK |
  640. +-----------------+-----------+-------------+----------------+
  641. | base.format | NULL | OK | OK |
  642. +-----------------+-----------+-------------+----------------+
  643. | base.ndim | 1 | 1 | OK |
  644. +-----------------+-----------+-------------+----------------+
  645. | base.shape | NULL | NULL | OK |
  646. +-----------------+-----------+-------------+----------------+
  647. | base.strides | NULL | NULL | OK |
  648. +-----------------+-----------+-------------+----------------+
  649. | base.suboffsets | NULL | NULL | OK |
  650. +-----------------+-----------+-------------+----------------+
  651. | base.internal | OK | OK | OK |
  652. +-----------------+-----------+-------------+----------------+
  653. */
  654. static Py_ssize_t
  655. get_itemsize(PyObject *format)
  656. {
  657. PyObject *tmp;
  658. Py_ssize_t itemsize;
  659. tmp = PyObject_CallFunctionObjArgs(calcsize, format, NULL);
  660. if (tmp == NULL)
  661. return -1;
  662. itemsize = PyLong_AsSsize_t(tmp);
  663. Py_DECREF(tmp);
  664. return itemsize;
  665. }
  666. static char *
  667. get_format(PyObject *format)
  668. {
  669. PyObject *tmp;
  670. char *fmt;
  671. tmp = PyUnicode_AsASCIIString(format);
  672. if (tmp == NULL)
  673. return NULL;
  674. fmt = PyMem_Malloc(PyBytes_GET_SIZE(tmp)+1);
  675. if (fmt == NULL) {
  676. PyErr_NoMemory();
  677. Py_DECREF(tmp);
  678. return NULL;
  679. }
  680. strcpy(fmt, PyBytes_AS_STRING(tmp));
  681. Py_DECREF(tmp);
  682. return fmt;
  683. }
  684. static int
  685. init_simple(ndbuf_t *ndbuf, PyObject *items, PyObject *format,
  686. Py_ssize_t itemsize)
  687. {
  688. PyObject *mview;
  689. Py_buffer *base = &ndbuf->base;
  690. int ret;
  691. mview = PyMemoryView_FromBuffer(base);
  692. if (mview == NULL)
  693. return -1;
  694. ret = pack_from_list(mview, items, format, itemsize);
  695. Py_DECREF(mview);
  696. if (ret < 0)
  697. return -1;
  698. base->readonly = !(ndbuf->flags & ND_WRITABLE);
  699. base->itemsize = itemsize;
  700. base->format = get_format(format);
  701. if (base->format == NULL)
  702. return -1;
  703. return 0;
  704. }
  705. static Py_ssize_t *
  706. seq_as_ssize_array(PyObject *seq, Py_ssize_t len, int is_shape)
  707. {
  708. Py_ssize_t *dest;
  709. Py_ssize_t x, i;
  710. /* ndim = len <= ND_MAX_NDIM, so PyMem_New() is actually not needed. */
  711. dest = PyMem_New(Py_ssize_t, len);
  712. if (dest == NULL) {
  713. PyErr_NoMemory();
  714. return NULL;
  715. }
  716. for (i = 0; i < len; i++) {
  717. PyObject *tmp = PySequence_Fast_GET_ITEM(seq, i);
  718. if (!PyLong_Check(tmp)) {
  719. PyErr_Format(PyExc_ValueError,
  720. "elements of %s must be integers",
  721. is_shape ? "shape" : "strides");
  722. PyMem_Free(dest);
  723. return NULL;
  724. }
  725. x = PyLong_AsSsize_t(tmp);
  726. if (PyErr_Occurred()) {
  727. PyMem_Free(dest);
  728. return NULL;
  729. }
  730. if (is_shape && x < 0) {
  731. PyErr_Format(PyExc_ValueError,
  732. "elements of shape must be integers >= 0");
  733. PyMem_Free(dest);
  734. return NULL;
  735. }
  736. dest[i] = x;
  737. }
  738. return dest;
  739. }
  740. static Py_ssize_t *
  741. strides_from_shape(const ndbuf_t *ndbuf, int flags)
  742. {
  743. const Py_buffer *base = &ndbuf->base;
  744. Py_ssize_t *s, i;
  745. s = PyMem_Malloc(base->ndim * (sizeof *s));
  746. if (s == NULL) {
  747. PyErr_NoMemory();
  748. return NULL;
  749. }
  750. if (flags & ND_FORTRAN) {
  751. s[0] = base->itemsize;
  752. for (i = 1; i < base->ndim; i++)
  753. s[i] = s[i-1] * base->shape[i-1];
  754. }
  755. else {
  756. s[base->ndim-1] = base->itemsize;
  757. for (i = base->ndim-2; i >= 0; i--)
  758. s[i] = s[i+1] * base->shape[i+1];
  759. }
  760. return s;
  761. }
/* Bounds check:
     len := complete length of allocated memory
     offset := start of the array
   A single array element is indexed by:
     i = indices[0] * strides[0] + indices[1] * strides[1] + ...
   imin is reached when all indices[n] combined with positive strides are 0
   and all indices combined with negative strides are shape[n]-1, which is
   the maximum index for the nth dimension.
   imax is reached when all indices[n] combined with negative strides are 0
   and all indices combined with positive strides are shape[n]-1.

   Returns 0 if every reachable element lies inside [0, len), -1 with an
   exception set otherwise. */
static int
verify_structure(Py_ssize_t len, Py_ssize_t itemsize, Py_ssize_t offset,
                 const Py_ssize_t *shape, const Py_ssize_t *strides,
                 Py_ssize_t ndim)
{
    Py_ssize_t imin, imax;
    Py_ssize_t n;

    assert(ndim >= 0);

    /* Scalar case: only the single item at 'offset' must be in bounds. */
    if (ndim == 0 && (offset < 0 || offset+itemsize > len))
        goto invalid_combination;

    for (n = 0; n < ndim; n++)
        if (strides[n] % itemsize) {
            PyErr_SetString(PyExc_ValueError,
                "strides must be a multiple of itemsize");
            return -1;
        }

    /* An empty array never accesses memory: nothing more to verify. */
    for (n = 0; n < ndim; n++)
        if (shape[n] == 0)
            return 0;

    /* Accumulate the extreme reachable offsets (see comment above). */
    imin = imax = 0;
    for (n = 0; n < ndim; n++)
        if (strides[n] <= 0)
            imin += (shape[n]-1) * strides[n];
        else
            imax += (shape[n]-1) * strides[n];

    if (imin + offset < 0 || imax + offset + itemsize > len)
        goto invalid_combination;

    return 0;

invalid_combination:
    PyErr_SetString(PyExc_ValueError,
        "invalid combination of buffer, shape and strides");
    return -1;
}
/*
   Convert a NumPy-style array to an array using suboffsets to stride in
   the first dimension. Requirements: ndim > 0.

   Contiguous example
   ==================

     Input:
     ------
       shape      = {2, 2, 3};
       strides    = {6, 3, 1};
       suboffsets = NULL;
       data       = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
       buf        = &data[0]

     Output:
     -------
       shape      = {2, 2, 3};
       strides    = {sizeof(char *), 3, 1};
       suboffsets = {0, -1, -1};
       data       = {p1, p2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
       (p1 and p2 point at the start of the first and second {2, 3}
        subarray respectively)
       buf        = &data[0]

   So, in the example the input resembles the three-dimensional array
   char v[2][2][3], while the output resembles an array of two pointers
   to two-dimensional arrays: char (*v[2])[2][3].

   Non-contiguous example:
   =======================

     Input (with offset and negative strides):
     -----------------------------------------
       shape      = {2, 2, 3};
       strides    = {-6, 3, -1};
       offset     = 8
       suboffsets = NULL;
       data       = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};

     Output:
     -------
       shape      = {2, 2, 3};
       strides    = {-sizeof(char *), 3, -1};
       suboffsets = {2, -1, -1};
       newdata    = {p1, p2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
       (p1+suboffsets[0] and p2+suboffsets[0] address the subarrays)
       buf        = &newdata[1]  # striding backwards over the pointers.

   suboffsets[0] is the same as the offset that one would specify if
   the two {2, 3} subarrays were created directly, hence the name.
*/
static int
init_suboffsets(ndbuf_t *ndbuf)
{
    Py_buffer *base = &ndbuf->base;
    Py_ssize_t start, step;
    Py_ssize_t imin, suboffset0;
    Py_ssize_t addsize;
    Py_ssize_t n;
    char *data;

    assert(base->ndim > 0);
    assert(base->suboffsets == NULL);

    /* Allocate new data with additional space for shape[0] pointers. */
    addsize = base->shape[0] * (sizeof (char *));

    /* Align array start to a multiple of 8. */
    addsize = 8 * ((addsize + 7) / 8);

    data = PyMem_Malloc(ndbuf->len + addsize);
    if (data == NULL) {
        PyErr_NoMemory();
        return -1;
    }

    /* Move the payload past the pointer table. */
    memcpy(data + addsize, ndbuf->data, ndbuf->len);

    PyMem_Free(ndbuf->data);
    ndbuf->data = data;
    ndbuf->len += addsize;
    base->buf = ndbuf->data;

    /* imin: minimum index of the input array relative to ndbuf->offset.
       suboffset0: offset for each sub-array of the output. This is the
       same as calculating -imin' for a sub-array of ndim-1. */
    imin = suboffset0 = 0;
    for (n = 0; n < base->ndim; n++) {
        if (base->shape[n] == 0)
            break;
        if (base->strides[n] <= 0) {
            Py_ssize_t x = (base->shape[n]-1) * base->strides[n];
            imin += x;
            suboffset0 += (n >= 1) ? -x : 0;
        }
    }

    /* Initialize the array of pointers to the sub-arrays. */
    start = addsize + ndbuf->offset + imin;
    step = base->strides[0] < 0 ? -base->strides[0] : base->strides[0];

    for (n = 0; n < base->shape[0]; n++)
        ((char **)base->buf)[n] = (char *)base->buf + start + n*step;

    /* Initialize suboffsets. Only the first dimension uses a pointer
       dereference; the rest are marked -1 (no dereference). */
    base->suboffsets = PyMem_Malloc(base->ndim * (sizeof *base->suboffsets));
    if (base->suboffsets == NULL) {
        PyErr_NoMemory();
        return -1;
    }
    base->suboffsets[0] = suboffset0;
    for (n = 1; n < base->ndim; n++)
        base->suboffsets[n] = -1;

    /* Adjust strides for the first (zeroth) dimension. */
    if (base->strides[0] >= 0) {
        base->strides[0] = sizeof(char *);
    }
    else {
        /* Striding backwards. */
        base->strides[0] = -(Py_ssize_t)sizeof(char *);
        if (base->shape[0] > 0)
            base->buf = (char *)base->buf + (base->shape[0]-1) * sizeof(char *);
    }

    /* The converted array is no longer C or Fortran contiguous, and the
       original offset has been folded into the pointer table. */
    ndbuf->flags &= ~(ND_C|ND_FORTRAN);
    ndbuf->offset = 0;
    return 0;
}
  921. static void
  922. init_len(Py_buffer *base)
  923. {
  924. Py_ssize_t i;
  925. base->len = 1;
  926. for (i = 0; i < base->ndim; i++)
  927. base->len *= base->shape[i];
  928. base->len *= base->itemsize;
  929. }
static int
init_structure(ndbuf_t *ndbuf, PyObject *shape, PyObject *strides,
               Py_ssize_t ndim)
{
    /* Fill in ndim/shape/strides/buf/len of ndbuf->base from the
       Python-level 'shape' and 'strides' sequences and set the
       contiguity flags.  Returns 0 on success, -1 with an exception set
       on error; partially allocated arrays are reclaimed by ndbuf_free(). */
    Py_buffer *base = &ndbuf->base;

    base->ndim = (int)ndim;
    if (ndim == 0) {
        /* A scalar has no shape or strides and is trivially contiguous. */
        if (ndbuf->flags & ND_PIL) {
            PyErr_SetString(PyExc_TypeError,
                "ndim = 0 cannot be used in conjunction with ND_PIL");
            return -1;
        }
        ndbuf->flags |= (ND_SCALAR|ND_C|ND_FORTRAN);
        return 0;
    }

    /* shape */
    base->shape = seq_as_ssize_array(shape, ndim, 1);
    if (base->shape == NULL)
        return -1;

    /* strides: either taken verbatim or derived from the shape in
       C or Fortran order. */
    if (strides) {
        base->strides = seq_as_ssize_array(strides, ndim, 0);
    }
    else {
        base->strides = strides_from_shape(ndbuf, ndbuf->flags);
    }
    if (base->strides == NULL)
        return -1;
    /* Reject structures whose reachable bytes fall outside the buffer. */
    if (verify_structure(base->len, base->itemsize, ndbuf->offset,
                         base->shape, base->strides, ndim) < 0)
        return -1;

    /* buf */
    base->buf = ndbuf->data + ndbuf->offset;

    /* len */
    init_len(base);

    /* ndbuf->flags */
    if (PyBuffer_IsContiguous(base, 'C'))
        ndbuf->flags |= ND_C;
    if (PyBuffer_IsContiguous(base, 'F'))
        ndbuf->flags |= ND_FORTRAN;

    /* convert numpy array to suboffset representation */
    if (ndbuf->flags & ND_PIL) {
        /* modifies base->buf, base->strides and base->suboffsets **/
        return init_suboffsets(ndbuf);
    }

    return 0;
}
static ndbuf_t *
init_ndbuf(PyObject *items, PyObject *shape, PyObject *strides,
           Py_ssize_t offset, PyObject *format, int flags)
{
    /* Create and fully initialize a new base buffer from Python-level
       arguments: validate shape/strides, pack 'items' according to
       'format', then build the buffer structure.  Returns a new ndbuf_t,
       or NULL with an exception set. */
    ndbuf_t *ndbuf;
    Py_ssize_t ndim;
    Py_ssize_t nitems;
    Py_ssize_t itemsize;

    /* ndim = len(shape) */
    CHECK_LIST_OR_TUPLE(shape)
    ndim = PySequence_Fast_GET_SIZE(shape);
    if (ndim > ND_MAX_NDIM) {
        PyErr_Format(PyExc_ValueError,
            "ndim must not exceed %d", ND_MAX_NDIM);
        return NULL;
    }

    /* len(strides) = len(shape) */
    if (strides) {
        CHECK_LIST_OR_TUPLE(strides)
        if (PySequence_Fast_GET_SIZE(strides) == 0)
            strides = NULL;   /* empty sequence: derive strides from shape */
        else if (flags & ND_FORTRAN) {
            PyErr_SetString(PyExc_TypeError,
                "ND_FORTRAN cannot be used together with strides");
            return NULL;
        }
        else if (PySequence_Fast_GET_SIZE(strides) != ndim) {
            PyErr_SetString(PyExc_ValueError,
                "len(shape) != len(strides)");
            return NULL;
        }
    }

    /* itemsize (get_itemsize() sets the exception for negative results) */
    itemsize = get_itemsize(format);
    if (itemsize <= 0) {
        if (itemsize == 0) {
            PyErr_SetString(PyExc_ValueError,
                "itemsize must not be zero");
        }
        return NULL;
    }

    /* convert scalar to list */
    if (ndim == 0) {
        items = Py_BuildValue("(O)", items);
        if (items == NULL)
            return NULL;
    }
    else {
        CHECK_LIST_OR_TUPLE(items)
        Py_INCREF(items);  /* balance the new reference from the scalar branch */
    }

    /* number of items */
    nitems = PySequence_Fast_GET_SIZE(items);
    if (nitems == 0) {
        PyErr_SetString(PyExc_ValueError,
            "initializer list or tuple must not be empty");
        Py_DECREF(items);
        return NULL;
    }

    ndbuf = ndbuf_new(nitems, itemsize, offset, flags);
    if (ndbuf == NULL) {
        Py_DECREF(items);
        return NULL;
    }

    /* Pack the items into the data area, then set up shape/strides. */
    if (init_simple(ndbuf, items, format, itemsize) < 0)
        goto error;
    if (init_structure(ndbuf, shape, strides, ndim) < 0)
        goto error;

    Py_DECREF(items);
    return ndbuf;

error:
    Py_DECREF(items);
    ndbuf_free(ndbuf);
    return NULL;
}
  1052. /* initialize and push a new base onto the linked list */
  1053. static int
  1054. ndarray_push_base(NDArrayObject *nd, PyObject *items,
  1055. PyObject *shape, PyObject *strides,
  1056. Py_ssize_t offset, PyObject *format, int flags)
  1057. {
  1058. ndbuf_t *ndbuf;
  1059. ndbuf = init_ndbuf(items, shape, strides, offset, format, flags);
  1060. if (ndbuf == NULL)
  1061. return -1;
  1062. ndbuf_push(nd, ndbuf);
  1063. return 0;
  1064. }
/* Sentinel: 'getbuf' keyword argument was not supplied. */
#define PyBUF_UNUSED 0x10000

static int
ndarray_init(PyObject *self, PyObject *args, PyObject *kwds)
{
    /* __init__ for ndarray.  Two construction modes:
         1) 'obj' is a buffer exporter and 'shape' is absent: become a
            consumer (re-exporter) of obj, requesting it with the 'getbuf'
            flags (default PyBUF_FULL_RO).
         2) 'obj' is a scalar, list or tuple: become an original base
            object with the given shape/strides/offset/format/flags.
       Returns 0 on success, -1 with an exception set on error. */
    NDArrayObject *nd = (NDArrayObject *)self;
    static char *kwlist[] = {
        "obj", "shape", "strides", "offset", "format", "flags", "getbuf", NULL
    };
    PyObject *v = NULL;  /* initializer: scalar, list, tuple or base object */
    PyObject *shape = NULL;   /* size of each dimension */
    PyObject *strides = NULL; /* number of bytes to the next elt in each dim */
    Py_ssize_t offset = 0;            /* buffer offset */
    PyObject *format = simple_format; /* struct module specifier: "B" */
    int flags = ND_DEFAULT;           /* base buffer and ndarray flags */
    int getbuf = PyBUF_UNUSED; /* re-exporter: getbuffer request flags */

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|OOnOii", kwlist,
            &v, &shape, &strides, &offset, &format, &flags, &getbuf))
        return -1;

    /* NDArrayObject is re-exporter */
    if (PyObject_CheckBuffer(v) && shape == NULL) {
        if (strides || offset || format != simple_format ||
            !(flags == ND_DEFAULT || flags == ND_REDIRECT)) {
            PyErr_SetString(PyExc_TypeError,
                "construction from exporter object only takes 'obj', 'getbuf' "
                "and 'flags' arguments");
            return -1;
        }

        /* Default to a full read-only buffer request. */
        getbuf = (getbuf == PyBUF_UNUSED) ? PyBUF_FULL_RO : getbuf;

        if (ndarray_init_staticbuf(v, nd, getbuf) < 0)
            return -1;

        init_flags(nd->head);
        nd->head->flags |= flags;

        return 0;
    }

    /* NDArrayObject is the original base object. */
    if (getbuf != PyBUF_UNUSED) {
        PyErr_SetString(PyExc_TypeError,
            "getbuf argument only valid for construction from exporter "
            "object");
        return -1;
    }
    if (shape == NULL) {
        PyErr_SetString(PyExc_TypeError,
            "shape is a required argument when constructing from "
            "list, tuple or scalar");
        return -1;
    }

    if (flags & ND_VAREXPORT) {
        /* ND_VAREXPORT is a property of the ndarray itself, not of a
           single base buffer: keep it on nd and strip it from 'flags'. */
        nd->flags |= ND_VAREXPORT;
        flags &= ~ND_VAREXPORT;
    }

    /* Initialize and push the first base buffer onto the linked list. */
    return ndarray_push_base(nd, v, shape, strides, offset, format, flags);
}
/* Push an additional base onto the linked list. */
static PyObject *
ndarray_push(PyObject *self, PyObject *args, PyObject *kwds)
{
    /* ndarray.push(items, shape, ...): install a new base buffer on top
       of the current head.  Only permitted for original base objects,
       and -- unless ND_VAREXPORT was given at construction time -- only
       while no buffers are currently exported. */
    NDArrayObject *nd = (NDArrayObject *)self;
    static char *kwlist[] = {
        "items", "shape", "strides", "offset", "format", "flags", NULL
    };
    PyObject *items = NULL;   /* initializer: scalar, list or tuple */
    PyObject *shape = NULL;   /* size of each dimension */
    PyObject *strides = NULL; /* number of bytes to the next elt in each dim */
    PyObject *format = simple_format; /* struct module specifier: "B" */
    Py_ssize_t offset = 0;            /* buffer offset */
    int flags = ND_DEFAULT;           /* base buffer flags */

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO|OnOi", kwlist,
            &items, &shape, &strides, &offset, &format, &flags))
        return NULL;

    if (flags & ND_VAREXPORT) {
        PyErr_SetString(PyExc_ValueError,
            "ND_VAREXPORT flag can only be used during object creation");
        return NULL;
    }
    if (ND_IS_CONSUMER(nd)) {
        /* A consumer mirrors its exporter; its structure must not change. */
        PyErr_SetString(PyExc_BufferError,
            "structure of re-exporting object is immutable");
        return NULL;
    }
    if (!(nd->flags&ND_VAREXPORT) && nd->head->exports > 0) {
        PyErr_Format(PyExc_BufferError,
            "cannot change structure: %zd exported buffer%s",
            nd->head->exports, nd->head->exports==1 ? "" : "s");
        return NULL;
    }

    if (ndarray_push_base(nd, items, shape, strides,
                          offset, format, flags) < 0)
        return NULL;
    Py_RETURN_NONE;
}
  1157. /* Pop a base from the linked list (if possible). */
  1158. static PyObject *
  1159. ndarray_pop(PyObject *self, PyObject *dummy)
  1160. {
  1161. NDArrayObject *nd = (NDArrayObject *)self;
  1162. if (ND_IS_CONSUMER(nd)) {
  1163. PyErr_SetString(PyExc_BufferError,
  1164. "structure of re-exporting object is immutable");
  1165. return NULL;
  1166. }
  1167. if (nd->head->exports > 0) {
  1168. PyErr_Format(PyExc_BufferError,
  1169. "cannot change structure: %zd exported buffer%s",
  1170. nd->head->exports, nd->head->exports==1 ? "" : "s");
  1171. return NULL;
  1172. }
  1173. if (nd->head->next == NULL) {
  1174. PyErr_SetString(PyExc_BufferError,
  1175. "list only has a single base");
  1176. return NULL;
  1177. }
  1178. ndbuf_pop(nd);
  1179. Py_RETURN_NONE;
  1180. }
/**************************************************************************/
/*                               getbuffer                                */
/**************************************************************************/

static int
ndarray_getbuf(NDArrayObject *self, Py_buffer *view, int flags)
{
    /* bf_getbuffer implementation.  Validates the consumer's request
       'flags' against the head base buffer and fills in 'view', degrading
       the exported information (format/shape/strides) as far as the
       request allows.  Returns 0 on success, -1 with BufferError set. */
    ndbuf_t *ndbuf = self->head;
    Py_buffer *base = &ndbuf->base;
    int baseflags = ndbuf->flags;

    /* redirect mode: forward the request to the original exporter */
    if (base->obj != NULL && (baseflags&ND_REDIRECT)) {
        return PyObject_GetBuffer(base->obj, view, flags);
    }

    /* start with complete information */
    *view = *base;
    view->obj = NULL;

    /* reconstruct format */
    if (view->format == NULL)
        view->format = "B";

    if (base->ndim != 0 &&
        ((REQ_SHAPE(flags) && base->shape == NULL) ||
         (REQ_STRIDES(flags) && base->strides == NULL))) {
        /* The ndarray is a re-exporter that has been created without full
           information for testing purposes. In this particular case the
           ndarray is not a PEP-3118 compliant buffer provider. */
        PyErr_SetString(PyExc_BufferError,
            "re-exporter does not provide format, shape or strides");
        return -1;
    }

    if (baseflags & ND_GETBUF_FAIL) {
        /* Forced failure for testing the consumer's error handling. */
        PyErr_SetString(PyExc_BufferError,
            "ND_GETBUF_FAIL: forced test exception");
        if (baseflags & ND_GETBUF_UNDEFINED)
            view->obj = (PyObject *)0x1; /* wrong but permitted in <= 3.2 */
        return -1;
    }

    if (REQ_WRITABLE(flags) && base->readonly) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not writable");
        return -1;
    }

    if (!REQ_FORMAT(flags)) {
        /* NULL indicates that the buffer's data type has been cast to 'B'.
           view->itemsize is the _previous_ itemsize. If shape is present,
           the equality product(shape) * itemsize = len still holds at this
           point. The equality calcsize(format) = itemsize does _not_ hold
           from here on! */
        view->format = NULL;
    }

    if (REQ_C_CONTIGUOUS(flags) && !ND_C_CONTIGUOUS(baseflags)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not C-contiguous");
        return -1;
    }

    if (REQ_F_CONTIGUOUS(flags) && !ND_FORTRAN_CONTIGUOUS(baseflags)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not Fortran contiguous");
        return -1;
    }

    if (REQ_ANY_CONTIGUOUS(flags) && !ND_ANY_CONTIGUOUS(baseflags)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not contiguous");
        return -1;
    }

    if (!REQ_INDIRECT(flags) && (baseflags & ND_PIL)) {
        /* PIL-style layout needs suboffsets; the consumer refused them. */
        PyErr_SetString(PyExc_BufferError,
            "ndarray cannot be represented without suboffsets");
        return -1;
    }

    if (!REQ_STRIDES(flags)) {
        /* Without strides, the buffer can only be exposed C-contiguously. */
        if (!ND_C_CONTIGUOUS(baseflags)) {
            PyErr_SetString(PyExc_BufferError,
                "ndarray is not C-contiguous");
            return -1;
        }
        view->strides = NULL;
    }

    if (!REQ_SHAPE(flags)) {
        /* PyBUF_SIMPLE or PyBUF_WRITABLE: at this point buf is C-contiguous,
           so base->buf = ndbuf->data. */
        if (view->format != NULL) {
            /* PyBUF_SIMPLE|PyBUF_FORMAT and PyBUF_WRITABLE|PyBUF_FORMAT do
               not make sense. */
            PyErr_Format(PyExc_BufferError,
                "ndarray: cannot cast to unsigned bytes if the format flag "
                "is present");
            return -1;
        }
        /* product(shape) * itemsize = len and calcsize(format) = itemsize
           do _not_ hold from here on! */
        view->ndim = 1;
        view->shape = NULL;
    }

    /* Ascertain that the new buffer has the same contiguity as the exporter */
    if (ND_C_CONTIGUOUS(baseflags) != PyBuffer_IsContiguous(view, 'C') ||
        /* skip cast to 1-d */
        (view->format != NULL && view->shape != NULL &&
         ND_FORTRAN_CONTIGUOUS(baseflags) != PyBuffer_IsContiguous(view, 'F')) ||
        /* cast to 1-d */
        (view->format == NULL && view->shape == NULL &&
         !PyBuffer_IsContiguous(view, 'F'))) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray: contiguity mismatch in getbuf()");
        return -1;
    }

    /* Success: the view owns a reference to self and the head's export
       count is bumped (undone in ndarray_releasebuf()). */
    view->obj = (PyObject *)self;
    Py_INCREF(view->obj);
    self->head->exports++;

    return 0;
}
  1291. static void
  1292. ndarray_releasebuf(NDArrayObject *self, Py_buffer *view)
  1293. {
  1294. if (!ND_IS_CONSUMER(self)) {
  1295. ndbuf_t *ndbuf = view->internal;
  1296. if (--ndbuf->exports == 0 && ndbuf != self->head)
  1297. ndbuf_delete(self, ndbuf);
  1298. }
  1299. }
/* Buffer protocol hooks for the ndarray type. */
static PyBufferProcs ndarray_as_buffer = {
    (getbufferproc)ndarray_getbuf, /* bf_getbuffer */
    (releasebufferproc)ndarray_releasebuf /* bf_releasebuffer */
};
  1304. /**************************************************************************/
  1305. /* indexing/slicing */
  1306. /**************************************************************************/
  1307. static char *
  1308. ptr_from_index(Py_buffer *base, Py_ssize_t index)
  1309. {
  1310. char *ptr;
  1311. Py_ssize_t nitems; /* items in the first dimension */
  1312. if (base->shape)
  1313. nitems = base->shape[0];
  1314. else {
  1315. assert(base->ndim == 1 && SIMPLE_FORMAT(base->format));
  1316. nitems = base->len;
  1317. }
  1318. if (index < 0) {
  1319. index += nitems;
  1320. }
  1321. if (index < 0 || index >= nitems) {
  1322. PyErr_SetString(PyExc_IndexError, "index out of bounds");
  1323. return NULL;
  1324. }
  1325. ptr = (char *)base->buf;
  1326. if (base->strides == NULL)
  1327. ptr += base->itemsize * index;
  1328. else
  1329. ptr += base->strides[0] * index;
  1330. ptr = ADJUST_PTR(ptr, base->suboffsets);
  1331. return ptr;
  1332. }
static PyObject *
ndarray_item(NDArrayObject *self, Py_ssize_t index)
{
    /* Return nd[index].  For a one-dimensional array the item is unpacked
       to a Python object; for ndim > 1 a sub-view of dimension ndim-1 is
       returned as a new ndarray that consumes self. */
    ndbuf_t *ndbuf = self->head;
    Py_buffer *base = &ndbuf->base;
    char *ptr;

    if (base->ndim == 0) {
        PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
        return NULL;
    }

    ptr = ptr_from_index(base, index);
    if (ptr == NULL)
        return NULL;

    if (base->ndim == 1) {
        return unpack_single(ptr, base->format, base->itemsize);
    }
    else {
        NDArrayObject *nd;
        Py_buffer *subview;

        nd = (NDArrayObject *)ndarray_new(&NDArray_Type, NULL, NULL);
        if (nd == NULL)
            return NULL;

        if (ndarray_init_staticbuf((PyObject *)self, nd, PyBUF_FULL_RO) < 0) {
            Py_DECREF(nd);
            return NULL;
        }

        /* Drop the first dimension: point buf at the selected row and
           advance shape/strides/suboffsets by one slot. */
        subview = &nd->staticbuf.base;

        subview->buf = ptr;
        subview->len /= subview->shape[0];

        subview->ndim--;
        subview->shape++;
        if (subview->strides) subview->strides++;
        if (subview->suboffsets) subview->suboffsets++;

        init_flags(&nd->staticbuf);

        return (PyObject *)nd;
    }
}
  1370. /*
  1371. For each dimension, we get valid (start, stop, step, slicelength) quadruples
  1372. from PySlice_GetIndicesEx().
  1373. Slicing NumPy arrays
  1374. ====================
  1375. A pointer to an element in a NumPy array is defined by:
  1376. ptr = (char *)buf + indices[0] * strides[0] +
  1377. ... +
  1378. indices[ndim-1] * strides[ndim-1]
  1379. Adjust buf:
  1380. -----------
  1381. Adding start[n] for each dimension effectively adds the constant:
  1382. c = start[0] * strides[0] + ... + start[ndim-1] * strides[ndim-1]
  1383. Therefore init_slice() adds all start[n] directly to buf.
  1384. Adjust shape:
  1385. -------------
  1386. Obviously shape[n] = slicelength[n]
  1387. Adjust strides:
  1388. ---------------
  1389. In the original array, the next element in a dimension is reached
  1390. by adding strides[n] to the pointer. In the sliced array, elements
  1391. may be skipped, so the next element is reached by adding:
  1392. strides[n] * step[n]
  1393. Slicing PIL arrays
  1394. ==================
  1395. Layout:
  1396. -------
  1397. In the first (zeroth) dimension, PIL arrays have an array of pointers
  1398. to sub-arrays of ndim-1. Striding in the first dimension is done by
getting the index of the nth pointer, dereferencing it and then adding a
suboffset to it. The arrays pointed to can best be seen as regular
NumPy arrays.
  1402. Adjust buf:
  1403. -----------
  1404. In the original array, buf points to a location (usually the start)
  1405. in the array of pointers. For the sliced array, start[0] can be
  1406. added to buf in the same manner as for NumPy arrays.
  1407. Adjust suboffsets:
  1408. ------------------
  1409. Due to the dereferencing step in the addressing scheme, it is not
  1410. possible to adjust buf for higher dimensions. Recall that the
  1411. sub-arrays pointed to are regular NumPy arrays, so for each of
  1412. those arrays adding start[n] effectively adds the constant:
  1413. c = start[1] * strides[1] + ... + start[ndim-1] * strides[ndim-1]
  1414. This constant is added to suboffsets[0]. suboffsets[0] in turn is
  1415. added to each pointer right after dereferencing.
  1416. Adjust shape and strides:
  1417. -------------------------
  1418. Shape and strides are not influenced by the dereferencing step, so
  1419. they are adjusted in the same manner as for NumPy arrays.
  1420. Multiple levels of suboffsets
  1421. =============================
  1422. For a construct like an array of pointers to array of pointers to
  1423. sub-arrays of ndim-2:
  1424. suboffsets[0] = start[1] * strides[1]
  1425. suboffsets[1] = start[2] * strides[2] + ...
  1426. */
static int
init_slice(Py_buffer *base, PyObject *key, int dim)
{
    /* Apply slice 'key' to dimension 'dim' of 'base' in place, adjusting
       buf (or the relevant suboffset for PIL-style arrays), shape and
       strides as described in the comment above.  Returns 0 on success,
       -1 with an exception set on error. */
    Py_ssize_t start, stop, step, slicelength;

    if (PySlice_Unpack(key, &start, &stop, &step) < 0) {
        return -1;
    }
    slicelength = PySlice_AdjustIndices(base->shape[dim], &start, &stop, step);

    if (base->suboffsets == NULL || dim == 0) {
    adjust_buf:
        /* NumPy-style array or first dimension: fold 'start' into buf. */
        base->buf = (char *)base->buf + base->strides[dim] * start;
    }
    else {
        /* PIL-style: add 'start' to the nearest preceding suboffset that
           is actually applied after a dereference. */
        Py_ssize_t n = dim-1;
        while (n >= 0 && base->suboffsets[n] < 0)
            n--;
        if (n < 0)
            goto adjust_buf; /* all suboffsets are negative */
        base->suboffsets[n] = base->suboffsets[n] + base->strides[dim] * start;
    }

    base->shape[dim] = slicelength;
    base->strides[dim] = base->strides[dim] * step;

    return 0;
}
  1451. static int
  1452. copy_structure(Py_buffer *base)
  1453. {
  1454. Py_ssize_t *shape = NULL, *strides = NULL, *suboffsets = NULL;
  1455. Py_ssize_t i;
  1456. shape = PyMem_Malloc(base->ndim * (sizeof *shape));
  1457. strides = PyMem_Malloc(base->ndim * (sizeof *strides));
  1458. if (shape == NULL || strides == NULL)
  1459. goto err_nomem;
  1460. suboffsets = NULL;
  1461. if (base->suboffsets) {
  1462. suboffsets = PyMem_Malloc(base->ndim * (sizeof *suboffsets));
  1463. if (suboffsets == NULL)
  1464. goto err_nomem;
  1465. }
  1466. for (i = 0; i < base->ndim; i++) {
  1467. shape[i] = base->shape[i];
  1468. strides[i] = base->strides[i];
  1469. if (suboffsets)
  1470. suboffsets[i] = base->suboffsets[i];
  1471. }
  1472. base->shape = shape;
  1473. base->strides = strides;
  1474. base->suboffsets = suboffsets;
  1475. return 0;
  1476. err_nomem:
  1477. PyErr_NoMemory();
  1478. PyMem_XFree(shape);
  1479. PyMem_XFree(strides);
  1480. PyMem_XFree(suboffsets);
  1481. return -1;
  1482. }
static PyObject *
ndarray_subscript(NDArrayObject *self, PyObject *key)
{
    /* Implement nd[key].  Integers delegate to ndarray_item(); slices and
       tuples of slices return a new ndarray that re-exports self with a
       privately owned, adjusted shape/strides/suboffsets.  Scalars
       (ndim == 0) accept only x[()] (unpack the item) and x[...]
       (return self). */
    NDArrayObject *nd;
    ndbuf_t *ndbuf;
    Py_buffer *base = &self->head->base;

    if (base->ndim == 0) {
        if (PyTuple_Check(key) && PyTuple_GET_SIZE(key) == 0) {
            return unpack_single(base->buf, base->format, base->itemsize);
        }
        else if (key == Py_Ellipsis) {
            Py_INCREF(self);
            return (PyObject *)self;
        }
        else {
            PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
            return NULL;
        }
    }
    if (PyIndex_Check(key)) {
        Py_ssize_t index = PyLong_AsSsize_t(key);
        if (index == -1 && PyErr_Occurred())
            return NULL;
        return ndarray_item(self, index);
    }

    nd = (NDArrayObject *)ndarray_new(&NDArray_Type, NULL, NULL);
    if (nd == NULL)
        return NULL;

    /* new ndarray is a consumer */
    if (ndarray_init_staticbuf((PyObject *)self, nd, PyBUF_FULL_RO) < 0) {
        Py_DECREF(nd);
        return NULL;
    }

    /* copy shape, strides and suboffsets so init_slice() can modify them */
    ndbuf = nd->head;
    base = &ndbuf->base;
    if (copy_structure(base) < 0) {
        Py_DECREF(nd);
        return NULL;
    }
    ndbuf->flags |= ND_OWN_ARRAYS;

    if (PySlice_Check(key)) {
        /* one-dimensional slice */
        if (init_slice(base, key, 0) < 0)
            goto err_occurred;
    }
    else if (PyTuple_Check(key)) {
        /* multi-dimensional slice */
        PyObject *tuple = key;
        Py_ssize_t i, n;

        n = PyTuple_GET_SIZE(tuple);
        for (i = 0; i < n; i++) {
            key = PyTuple_GET_ITEM(tuple, i);
            if (!PySlice_Check(key))
                goto type_error;
            if (init_slice(base, key, (int)i) < 0)
                goto err_occurred;
        }
    }
    else {
        goto type_error;
    }

    init_len(base);
    init_flags(ndbuf);

    return (PyObject *)nd;

type_error:
    PyErr_Format(PyExc_TypeError,
        "cannot index memory using \"%.200s\"",
        Py_TYPE(key)->tp_name);
    /* deliberate fall through: release the partially built consumer */
err_occurred:
    Py_DECREF(nd);
    return NULL;
}
static int
ndarray_ass_subscript(NDArrayObject *self, PyObject *key, PyObject *value)
{
    /* Implement nd[key] = value.  A single item is packed from a scalar
       value; slice assignment requires 'value' to be a buffer exporter
       with a compatible structure.  Returns 0 on success, -1 with an
       exception set on error. */
    NDArrayObject *nd;
    Py_buffer *dest = &self->head->base;
    Py_buffer src;
    char *ptr;
    Py_ssize_t index;
    int ret = -1;

    if (dest->readonly) {
        PyErr_SetString(PyExc_TypeError, "ndarray is not writable");
        return -1;
    }
    if (value == NULL) {
        /* item deletion (del nd[key]) is not supported */
        PyErr_SetString(PyExc_TypeError, "ndarray data cannot be deleted");
        return -1;
    }
    if (dest->ndim == 0) {
        /* scalars accept only x[...] = v and x[()] = v */
        if (key == Py_Ellipsis ||
            (PyTuple_Check(key) && PyTuple_GET_SIZE(key) == 0)) {
            ptr = (char *)dest->buf;
            return pack_single(ptr, value, dest->format, dest->itemsize);
        }
        else {
            PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
            return -1;
        }
    }
    if (dest->ndim == 1 && PyIndex_Check(key)) {
        /* rvalue must be a single item */
        index = PyLong_AsSsize_t(key);
        if (index == -1 && PyErr_Occurred())
            return -1;
        else {
            ptr = ptr_from_index(dest, index);
            if (ptr == NULL)
                return -1;
        }
        return pack_single(ptr, value, dest->format, dest->itemsize);
    }

    /* rvalue must be an exporter */
    if (PyObject_GetBuffer(value, &src, PyBUF_FULL_RO) == -1)
        return -1;

    /* Take a sliced view of self and copy the source buffer into it. */
    nd = (NDArrayObject *)ndarray_subscript(self, key);
    if (nd != NULL) {
        dest = &nd->head->base;
        ret = copy_buffer(dest, &src);
        Py_DECREF(nd);
    }

    PyBuffer_Release(&src);
    return ret;
}
  1608. static PyObject *
  1609. slice_indices(PyObject *self, PyObject *args)
  1610. {
  1611. PyObject *ret, *key, *tmp;
  1612. Py_ssize_t s[4]; /* start, stop, step, slicelength */
  1613. Py_ssize_t i, len;
  1614. if (!PyArg_ParseTuple(args, "On", &key, &len)) {
  1615. return NULL;
  1616. }
  1617. if (!PySlice_Check(key)) {
  1618. PyErr_SetString(PyExc_TypeError,
  1619. "first argument must be a slice object");
  1620. return NULL;
  1621. }
  1622. if (PySlice_Unpack(key, &s[0], &s[1], &s[2]) < 0) {
  1623. return NULL;
  1624. }
  1625. s[3] = PySlice_AdjustIndices(len, &s[0], &s[1], s[2]);
  1626. ret = PyTuple_New(4);
  1627. if (ret == NULL)
  1628. return NULL;
  1629. for (i = 0; i < 4; i++) {
  1630. tmp = PyLong_FromSsize_t(s[i]);
  1631. if (tmp == NULL)
  1632. goto error;
  1633. PyTuple_SET_ITEM(ret, i, tmp);
  1634. }
  1635. return ret;
  1636. error:
  1637. Py_DECREF(ret);
  1638. return NULL;
  1639. }
/* Mapping protocol: nd[key] and nd[key] = value. */
static PyMappingMethods ndarray_as_mapping = {
    NULL, /* mp_length */
    (binaryfunc)ndarray_subscript, /* mp_subscript */
    (objobjargproc)ndarray_ass_subscript /* mp_ass_subscript */
};
/* Sequence protocol: only integer indexing (nd[i]) is provided. */
static PySequenceMethods ndarray_as_sequence = {
    0, /* sq_length */
    0, /* sq_concat */
    0, /* sq_repeat */
    (ssizeargfunc)ndarray_item, /* sq_item */
};
  1651. /**************************************************************************/
  1652. /* getters */
  1653. /**************************************************************************/
  1654. static PyObject *
  1655. ssize_array_as_tuple(Py_ssize_t *array, Py_ssize_t len)
  1656. {
  1657. PyObject *tuple, *x;
  1658. Py_ssize_t i;
  1659. if (array == NULL)
  1660. return PyTuple_New(0);
  1661. tuple = PyTuple_New(len);
  1662. if (tuple == NULL)
  1663. return NULL;
  1664. for (i = 0; i < len; i++) {
  1665. x = PyLong_FromSsize_t(array[i]);
  1666. if (x == NULL) {
  1667. Py_DECREF(tuple);
  1668. return NULL;
  1669. }
  1670. PyTuple_SET_ITEM(tuple, i, x);
  1671. }
  1672. return tuple;
  1673. }
  1674. static PyObject *
  1675. ndarray_get_flags(NDArrayObject *self, void *closure)
  1676. {
  1677. return PyLong_FromLong(self->head->flags);
  1678. }
  1679. static PyObject *
  1680. ndarray_get_offset(NDArrayObject *self, void *closure)
  1681. {
  1682. ndbuf_t *ndbuf = self->head;
  1683. return PyLong_FromSsize_t(ndbuf->offset);
  1684. }
  1685. static PyObject *
  1686. ndarray_get_obj(NDArrayObject *self, void *closure)
  1687. {
  1688. Py_buffer *base = &self->head->base;
  1689. if (base->obj == NULL) {
  1690. Py_RETURN_NONE;
  1691. }
  1692. Py_INCREF(base->obj);
  1693. return base->obj;
  1694. }
  1695. static PyObject *
  1696. ndarray_get_nbytes(NDArrayObject *self, void *closure)
  1697. {
  1698. Py_buffer *base = &self->head->base;
  1699. return PyLong_FromSsize_t(base->len);
  1700. }
  1701. static PyObject *
  1702. ndarray_get_readonly(NDArrayObject *self, void *closure)
  1703. {
  1704. Py_buffer *base = &self->head->base;
  1705. return PyBool_FromLong(base->readonly);
  1706. }
  1707. static PyObject *
  1708. ndarray_get_itemsize(NDArrayObject *self, void *closure)
  1709. {
  1710. Py_buffer *base = &self->head->base;
  1711. return PyLong_FromSsize_t(base->itemsize);
  1712. }
  1713. static PyObject *
  1714. ndarray_get_format(NDArrayObject *self, void *closure)
  1715. {
  1716. Py_buffer *base = &self->head->base;
  1717. const char *fmt = base->format ? base->format : "";
  1718. return PyUnicode_FromString(fmt);
  1719. }
  1720. static PyObject *
  1721. ndarray_get_ndim(NDArrayObject *self, void *closure)
  1722. {
  1723. Py_buffer *base = &self->head->base;
  1724. return PyLong_FromSsize_t(base->ndim);
  1725. }
  1726. static PyObject *
  1727. ndarray_get_shape(NDArrayObject *self, void *closure)
  1728. {
  1729. Py_buffer *base = &self->head->base;
  1730. return ssize_array_as_tuple(base->shape, base->ndim);
  1731. }
  1732. static PyObject *
  1733. ndarray_get_strides(NDArrayObject *self, void *closure)
  1734. {
  1735. Py_buffer *base = &self->head->base;
  1736. return ssize_array_as_tuple(base->strides, base->ndim);
  1737. }
  1738. static PyObject *
  1739. ndarray_get_suboffsets(NDArrayObject *self, void *closure)
  1740. {
  1741. Py_buffer *base = &self->head->base;
  1742. return ssize_array_as_tuple(base->suboffsets, base->ndim);
  1743. }
  1744. static PyObject *
  1745. ndarray_c_contig(PyObject *self, PyObject *dummy)
  1746. {
  1747. NDArrayObject *nd = (NDArrayObject *)self;
  1748. int ret = PyBuffer_IsContiguous(&nd->head->base, 'C');
  1749. if (ret != ND_C_CONTIGUOUS(nd->head->flags)) {
  1750. PyErr_SetString(PyExc_RuntimeError,
  1751. "results from PyBuffer_IsContiguous() and flags differ");
  1752. return NULL;
  1753. }
  1754. return PyBool_FromLong(ret);
  1755. }
  1756. static PyObject *
  1757. ndarray_fortran_contig(PyObject *self, PyObject *dummy)
  1758. {
  1759. NDArrayObject *nd = (NDArrayObject *)self;
  1760. int ret = PyBuffer_IsContiguous(&nd->head->base, 'F');
  1761. if (ret != ND_FORTRAN_CONTIGUOUS(nd->head->flags)) {
  1762. PyErr_SetString(PyExc_RuntimeError,
  1763. "results from PyBuffer_IsContiguous() and flags differ");
  1764. return NULL;
  1765. }
  1766. return PyBool_FromLong(ret);
  1767. }
  1768. static PyObject *
  1769. ndarray_contig(PyObject *self, PyObject *dummy)
  1770. {
  1771. NDArrayObject *nd = (NDArrayObject *)self;
  1772. int ret = PyBuffer_IsContiguous(&nd->head->base, 'A');
  1773. if (ret != ND_ANY_CONTIGUOUS(nd->head->flags)) {
  1774. PyErr_SetString(PyExc_RuntimeError,
  1775. "results from PyBuffer_IsContiguous() and flags differ");
  1776. return NULL;
  1777. }
  1778. return PyBool_FromLong(ret);
  1779. }
/* Read-only attributes.  All values are derived from the head base
   buffer, except 'flags' and 'offset' which live on the ndbuf itself. */
static PyGetSetDef ndarray_getset [] =
{
  /* ndbuf */
  { "flags", (getter)ndarray_get_flags, NULL, NULL, NULL},
  { "offset", (getter)ndarray_get_offset, NULL, NULL, NULL},
  /* ndbuf.base */
  { "obj", (getter)ndarray_get_obj, NULL, NULL, NULL},
  { "nbytes", (getter)ndarray_get_nbytes, NULL, NULL, NULL},
  { "readonly", (getter)ndarray_get_readonly, NULL, NULL, NULL},
  { "itemsize", (getter)ndarray_get_itemsize, NULL, NULL, NULL},
  { "format", (getter)ndarray_get_format, NULL, NULL, NULL},
  { "ndim", (getter)ndarray_get_ndim, NULL, NULL, NULL},
  { "shape", (getter)ndarray_get_shape, NULL, NULL, NULL},
  { "strides", (getter)ndarray_get_strides, NULL, NULL, NULL},
  { "suboffsets", (getter)ndarray_get_suboffsets, NULL, NULL, NULL},
  /* contiguity checks (also validate the cached flags) */
  { "c_contiguous", (getter)ndarray_c_contig, NULL, NULL, NULL},
  { "f_contiguous", (getter)ndarray_fortran_contig, NULL, NULL, NULL},
  { "contiguous", (getter)ndarray_contig, NULL, NULL, NULL},
  {NULL}
};
  1800. static PyObject *
  1801. ndarray_tolist(PyObject *self, PyObject *dummy)
  1802. {
  1803. return ndarray_as_list((NDArrayObject *)self);
  1804. }
  1805. static PyObject *
  1806. ndarray_tobytes(PyObject *self, PyObject *dummy)
  1807. {
  1808. ndbuf_t *ndbuf = ((NDArrayObject *)self)->head;
  1809. Py_buffer *src = &ndbuf->base;
  1810. Py_buffer dest;
  1811. PyObject *ret = NULL;
  1812. char *mem;
  1813. if (ND_C_CONTIGUOUS(ndbuf->flags))
  1814. return PyBytes_FromStringAndSize(src->buf, src->len);
  1815. assert(src->shape != NULL);
  1816. assert(src->strides != NULL);
  1817. assert(src->ndim > 0);
  1818. mem = PyMem_Malloc(src->len);
  1819. if (mem == NULL) {
  1820. PyErr_NoMemory();
  1821. return NULL;
  1822. }
  1823. dest = *src;
  1824. dest.buf = mem;
  1825. dest.suboffsets = NULL;
  1826. dest.strides = strides_from_shape(ndbuf, 0);
  1827. if (dest.strides == NULL)
  1828. goto out;
  1829. if (copy_buffer(&dest, src) < 0)
  1830. goto out;
  1831. ret = PyBytes_FromStringAndSize(mem, src->len);
  1832. out:
  1833. PyMem_XFree(dest.strides);
  1834. PyMem_Free(mem);
  1835. return ret;
  1836. }
  1837. /* add redundant (negative) suboffsets for testing */
  1838. static PyObject *
  1839. ndarray_add_suboffsets(PyObject *self, PyObject *dummy)
  1840. {
  1841. NDArrayObject *nd = (NDArrayObject *)self;
  1842. Py_buffer *base = &nd->head->base;
  1843. Py_ssize_t i;
  1844. if (base->suboffsets != NULL) {
  1845. PyErr_SetString(PyExc_TypeError,
  1846. "cannot add suboffsets to PIL-style array");
  1847. return NULL;
  1848. }
  1849. if (base->strides == NULL) {
  1850. PyErr_SetString(PyExc_TypeError,
  1851. "cannot add suboffsets to array without strides");
  1852. return NULL;
  1853. }
  1854. base->suboffsets = PyMem_Malloc(base->ndim * (sizeof *base->suboffsets));
  1855. if (base->suboffsets == NULL) {
  1856. PyErr_NoMemory();
  1857. return NULL;
  1858. }
  1859. for (i = 0; i < base->ndim; i++)
  1860. base->suboffsets[i] = -1;
  1861. nd->head->flags &= ~(ND_C|ND_FORTRAN);
  1862. Py_RETURN_NONE;
  1863. }
/* Test PyMemoryView_FromBuffer(): return a memoryview from a static buffer.
   Obviously this is fragile and only one such view may be active at any
   time. Never use anything like this in real code! */
/* Single global backing store for the view produced by
   ndarray_memoryview_from_buffer() below; reallocated on each call. */
static char *infobuf = NULL;
/* Build a memoryview via PyMemoryView_FromBuffer() from a snapshot of this
   ndarray's buffer. All metadata (format/shape/strides/suboffsets) is
   copied into static arrays and the raw data into the global 'infobuf',
   so only one such view is valid at a time (see comment above). */
static PyObject *
ndarray_memoryview_from_buffer(PyObject *self, PyObject *dummy)
{
    const NDArrayObject *nd = (NDArrayObject *)self;
    const Py_buffer *view = &nd->head->base;
    const ndbuf_t *ndbuf;
    static char format[ND_MAX_NDIM+1];
    static Py_ssize_t shape[ND_MAX_NDIM];
    static Py_ssize_t strides[ND_MAX_NDIM];
    static Py_ssize_t suboffsets[ND_MAX_NDIM];
    static Py_buffer info;
    char *p;

    /* Locate the ndbuf that owns the complete raw data region. */
    if (!ND_IS_CONSUMER(nd))
        ndbuf = nd->head; /* self is ndarray/original exporter */
    else if (NDArray_Check(view->obj) && !ND_IS_CONSUMER(view->obj))
        /* self is ndarray and consumer from ndarray/original exporter */
        ndbuf = ((NDArrayObject *)view->obj)->head;
    else {
        PyErr_SetString(PyExc_TypeError,
            "memoryview_from_buffer(): ndarray must be original exporter or "
            "consumer from ndarray/original exporter");
        return NULL;
    }

    info = *view;
    /* Resize the global snapshot buffer to hold the full data region. */
    p = PyMem_Realloc(infobuf, ndbuf->len);
    if (p == NULL) {
        PyMem_Free(infobuf);
        PyErr_NoMemory();
        infobuf = NULL;
        return NULL;
    }
    else {
        infobuf = p;
    }
    /* copy the complete raw data */
    memcpy(infobuf, ndbuf->data, ndbuf->len);
    /* Preserve the view's offset into the data region. */
    info.buf = infobuf + ((char *)view->buf - ndbuf->data);

    if (view->format) {
        if (strlen(view->format) > ND_MAX_NDIM) {
            PyErr_Format(PyExc_TypeError,
                "memoryview_from_buffer: format is limited to %d characters",
                ND_MAX_NDIM);
            return NULL;
        }
        strcpy(format, view->format);
        info.format = format;
    }
    if (view->ndim > ND_MAX_NDIM) {
        PyErr_Format(PyExc_TypeError,
            "memoryview_from_buffer: ndim is limited to %d", ND_MAX_NDIM);
        return NULL;
    }
    /* Copy the shape/strides/suboffsets arrays into static storage, since
       PyMemoryView_FromBuffer() does not copy them itself. */
    if (view->shape) {
        memcpy(shape, view->shape, view->ndim * sizeof(Py_ssize_t));
        info.shape = shape;
    }
    if (view->strides) {
        memcpy(strides, view->strides, view->ndim * sizeof(Py_ssize_t));
        info.strides = strides;
    }
    if (view->suboffsets) {
        memcpy(suboffsets, view->suboffsets, view->ndim * sizeof(Py_ssize_t));
        info.suboffsets = suboffsets;
    }

    return PyMemoryView_FromBuffer(&info);
}
  1934. /* Get a single item from bufobj at the location specified by seq.
  1935. seq is a list or tuple of indices. The purpose of this function
  1936. is to check other functions against PyBuffer_GetPointer(). */
  1937. static PyObject *
  1938. get_pointer(PyObject *self, PyObject *args)
  1939. {
  1940. PyObject *ret = NULL, *bufobj, *seq;
  1941. Py_buffer view;
  1942. Py_ssize_t indices[ND_MAX_NDIM];
  1943. Py_ssize_t i;
  1944. void *ptr;
  1945. if (!PyArg_ParseTuple(args, "OO", &bufobj, &seq)) {
  1946. return NULL;
  1947. }
  1948. CHECK_LIST_OR_TUPLE(seq);
  1949. if (PyObject_GetBuffer(bufobj, &view, PyBUF_FULL_RO) < 0)
  1950. return NULL;
  1951. if (view.ndim > ND_MAX_NDIM) {
  1952. PyErr_Format(PyExc_ValueError,
  1953. "get_pointer(): ndim > %d", ND_MAX_NDIM);
  1954. goto out;
  1955. }
  1956. if (PySequence_Fast_GET_SIZE(seq) != view.ndim) {
  1957. PyErr_SetString(PyExc_ValueError,
  1958. "get_pointer(): len(indices) != ndim");
  1959. goto out;
  1960. }
  1961. for (i = 0; i < view.ndim; i++) {
  1962. PyObject *x = PySequence_Fast_GET_ITEM(seq, i);
  1963. indices[i] = PyLong_AsSsize_t(x);
  1964. if (PyErr_Occurred())
  1965. goto out;
  1966. if (indices[i] < 0 || indices[i] >= view.shape[i]) {
  1967. PyErr_Format(PyExc_ValueError,
  1968. "get_pointer(): invalid index %zd at position %zd",
  1969. indices[i], i);
  1970. goto out;
  1971. }
  1972. }
  1973. ptr = PyBuffer_GetPointer(&view, indices);
  1974. ret = unpack_single(ptr, view.format, view.itemsize);
  1975. out:
  1976. PyBuffer_Release(&view);
  1977. return ret;
  1978. }
  1979. static PyObject *
  1980. get_sizeof_void_p(PyObject *self, PyObject *Py_UNUSED(ignored))
  1981. {
  1982. return PyLong_FromSize_t(sizeof(void *));
  1983. }
  1984. static char
  1985. get_ascii_order(PyObject *order)
  1986. {
  1987. PyObject *ascii_order;
  1988. char ord;
  1989. if (!PyUnicode_Check(order)) {
  1990. PyErr_SetString(PyExc_TypeError,
  1991. "order must be a string");
  1992. return CHAR_MAX;
  1993. }
  1994. ascii_order = PyUnicode_AsASCIIString(order);
  1995. if (ascii_order == NULL) {
  1996. return CHAR_MAX;
  1997. }
  1998. ord = PyBytes_AS_STRING(ascii_order)[0];
  1999. Py_DECREF(ascii_order);
  2000. if (ord != 'C' && ord != 'F' && ord != 'A') {
  2001. PyErr_SetString(PyExc_ValueError,
  2002. "invalid order, must be C, F or A");
  2003. return CHAR_MAX;
  2004. }
  2005. return ord;
  2006. }
  2007. /* Get a contiguous memoryview. */
  2008. static PyObject *
  2009. get_contiguous(PyObject *self, PyObject *args)
  2010. {
  2011. PyObject *obj;
  2012. PyObject *buffertype;
  2013. PyObject *order;
  2014. long type;
  2015. char ord;
  2016. if (!PyArg_ParseTuple(args, "OOO", &obj, &buffertype, &order)) {
  2017. return NULL;
  2018. }
  2019. if (!PyLong_Check(buffertype)) {
  2020. PyErr_SetString(PyExc_TypeError,
  2021. "buffertype must be PyBUF_READ or PyBUF_WRITE");
  2022. return NULL;
  2023. }
  2024. type = PyLong_AsLong(buffertype);
  2025. if (type == -1 && PyErr_Occurred()) {
  2026. return NULL;
  2027. }
  2028. if (type != PyBUF_READ && type != PyBUF_WRITE) {
  2029. PyErr_SetString(PyExc_ValueError,
  2030. "invalid buffer type");
  2031. return NULL;
  2032. }
  2033. ord = get_ascii_order(order);
  2034. if (ord == CHAR_MAX)
  2035. return NULL;
  2036. return PyMemoryView_GetContiguous(obj, (int)type, ord);
  2037. }
  2038. /* PyBuffer_ToContiguous() */
  2039. static PyObject *
  2040. py_buffer_to_contiguous(PyObject *self, PyObject *args)
  2041. {
  2042. PyObject *obj;
  2043. PyObject *order;
  2044. PyObject *ret = NULL;
  2045. int flags;
  2046. char ord;
  2047. Py_buffer view;
  2048. char *buf = NULL;
  2049. if (!PyArg_ParseTuple(args, "OOi", &obj, &order, &flags)) {
  2050. return NULL;
  2051. }
  2052. if (PyObject_GetBuffer(obj, &view, flags) < 0) {
  2053. return NULL;
  2054. }
  2055. ord = get_ascii_order(order);
  2056. if (ord == CHAR_MAX) {
  2057. goto out;
  2058. }
  2059. buf = PyMem_Malloc(view.len);
  2060. if (buf == NULL) {
  2061. PyErr_NoMemory();
  2062. goto out;
  2063. }
  2064. if (PyBuffer_ToContiguous(buf, &view, view.len, ord) < 0) {
  2065. goto out;
  2066. }
  2067. ret = PyBytes_FromStringAndSize(buf, view.len);
  2068. out:
  2069. PyBuffer_Release(&view);
  2070. PyMem_XFree(buf);
  2071. return ret;
  2072. }
  2073. static int
  2074. fmtcmp(const char *fmt1, const char *fmt2)
  2075. {
  2076. if (fmt1 == NULL) {
  2077. return fmt2 == NULL || strcmp(fmt2, "B") == 0;
  2078. }
  2079. if (fmt2 == NULL) {
  2080. return fmt1 == NULL || strcmp(fmt1, "B") == 0;
  2081. }
  2082. return strcmp(fmt1, fmt2) == 0;
  2083. }
  2084. static int
  2085. arraycmp(const Py_ssize_t *a1, const Py_ssize_t *a2, const Py_ssize_t *shape,
  2086. Py_ssize_t ndim)
  2087. {
  2088. Py_ssize_t i;
  2089. for (i = 0; i < ndim; i++) {
  2090. if (shape && shape[i] <= 1) {
  2091. /* strides can differ if the dimension is less than 2 */
  2092. continue;
  2093. }
  2094. if (a1[i] != a2[i]) {
  2095. return 0;
  2096. }
  2097. }
  2098. return 1;
  2099. }
/* Compare two contiguous buffers for physical equality.
   Returns True only if both are contiguous in the same order (both C or
   both Fortran), their metadata matches, and their raw bytes are equal. */
static PyObject *
cmp_contig(PyObject *self, PyObject *args)
{
    PyObject *b1, *b2; /* buffer objects */
    Py_buffer v1, v2;
    PyObject *ret;
    int equal = 0;

    if (!PyArg_ParseTuple(args, "OO", &b1, &b2)) {
        return NULL;
    }

    if (PyObject_GetBuffer(b1, &v1, PyBUF_FULL_RO) < 0) {
        PyErr_SetString(PyExc_TypeError,
            "cmp_contig: first argument does not implement the buffer "
            "protocol");
        return NULL;
    }
    if (PyObject_GetBuffer(b2, &v2, PyBUF_FULL_RO) < 0) {
        PyErr_SetString(PyExc_TypeError,
            "cmp_contig: second argument does not implement the buffer "
            "protocol");
        PyBuffer_Release(&v1);
        return NULL;
    }

    /* Both buffers must share a contiguity order; otherwise unequal. */
    if (!(PyBuffer_IsContiguous(&v1, 'C')&&PyBuffer_IsContiguous(&v2, 'C')) &&
        !(PyBuffer_IsContiguous(&v1, 'F')&&PyBuffer_IsContiguous(&v2, 'F'))) {
        goto result;
    }

    /* readonly may differ if created from non-contiguous */
    /* Scalar metadata and presence/absence of the metadata arrays must
       agree ("!!" normalizes pointers to 0/1). */
    if (v1.len != v2.len ||
        v1.itemsize != v2.itemsize ||
        v1.ndim != v2.ndim ||
        !fmtcmp(v1.format, v2.format) ||
        !!v1.shape != !!v2.shape ||
        !!v1.strides != !!v2.strides ||
        !!v1.suboffsets != !!v2.suboffsets) {
        goto result;
    }

    /* Element-wise comparison of the metadata arrays; stride mismatches
       in dimensions of extent <= 1 are tolerated (see arraycmp). */
    if ((v1.shape && !arraycmp(v1.shape, v2.shape, NULL, v1.ndim)) ||
        (v1.strides && !arraycmp(v1.strides, v2.strides, v1.shape, v1.ndim)) ||
        (v1.suboffsets && !arraycmp(v1.suboffsets, v2.suboffsets, NULL,
                                    v1.ndim))) {
        goto result;
    }

    /* Finally compare the raw memory. */
    if (memcmp((char *)v1.buf, (char *)v2.buf, v1.len) != 0) {
        goto result;
    }

    equal = 1;

result:
    PyBuffer_Release(&v1);
    PyBuffer_Release(&v2);

    ret = equal ? Py_True : Py_False;
    Py_INCREF(ret);
    return ret;
}
  2155. static PyObject *
  2156. is_contiguous(PyObject *self, PyObject *args)
  2157. {
  2158. PyObject *obj;
  2159. PyObject *order;
  2160. PyObject *ret = NULL;
  2161. Py_buffer view, *base;
  2162. char ord;
  2163. if (!PyArg_ParseTuple(args, "OO", &obj, &order)) {
  2164. return NULL;
  2165. }
  2166. ord = get_ascii_order(order);
  2167. if (ord == CHAR_MAX) {
  2168. return NULL;
  2169. }
  2170. if (NDArray_Check(obj)) {
  2171. /* Skip the buffer protocol to check simple etc. buffers directly. */
  2172. base = &((NDArrayObject *)obj)->head->base;
  2173. ret = PyBuffer_IsContiguous(base, ord) ? Py_True : Py_False;
  2174. }
  2175. else {
  2176. if (PyObject_GetBuffer(obj, &view, PyBUF_FULL_RO) < 0) {
  2177. PyErr_SetString(PyExc_TypeError,
  2178. "is_contiguous: object does not implement the buffer "
  2179. "protocol");
  2180. return NULL;
  2181. }
  2182. ret = PyBuffer_IsContiguous(&view, ord) ? Py_True : Py_False;
  2183. PyBuffer_Release(&view);
  2184. }
  2185. Py_INCREF(ret);
  2186. return ret;
  2187. }
  2188. static Py_hash_t
  2189. ndarray_hash(PyObject *self)
  2190. {
  2191. const NDArrayObject *nd = (NDArrayObject *)self;
  2192. const Py_buffer *view = &nd->head->base;
  2193. PyObject *bytes;
  2194. Py_hash_t hash;
  2195. if (!view->readonly) {
  2196. PyErr_SetString(PyExc_ValueError,
  2197. "cannot hash writable ndarray object");
  2198. return -1;
  2199. }
  2200. if (view->obj != NULL && PyObject_Hash(view->obj) == -1) {
  2201. return -1;
  2202. }
  2203. bytes = ndarray_tobytes(self, NULL);
  2204. if (bytes == NULL) {
  2205. return -1;
  2206. }
  2207. hash = PyObject_Hash(bytes);
  2208. Py_DECREF(bytes);
  2209. return hash;
  2210. }
/* Method table for ndarray; push/pop and the other entries refer to
   functions defined earlier in this file. */
static PyMethodDef ndarray_methods [] =
{
    { "tolist", ndarray_tolist, METH_NOARGS, NULL },
    { "tobytes", ndarray_tobytes, METH_NOARGS, NULL },
    { "push", (PyCFunction)(void(*)(void))ndarray_push, METH_VARARGS|METH_KEYWORDS, NULL },
    { "pop", ndarray_pop, METH_NOARGS, NULL },
    { "add_suboffsets", ndarray_add_suboffsets, METH_NOARGS, NULL },
    { "memoryview_from_buffer", ndarray_memoryview_from_buffer, METH_NOARGS, NULL },
    {NULL}
};
/* Statically allocated type object for ndarray; its ob_type slot is
   filled in at module init (see PyInit__testbuffer). */
static PyTypeObject NDArray_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "ndarray",                   /* Name of this type */
    sizeof(NDArrayObject),       /* Basic object size */
    0,                           /* Item size for varobject */
    (destructor)ndarray_dealloc, /* tp_dealloc */
    0,                           /* tp_vectorcall_offset */
    0,                           /* tp_getattr */
    0,                           /* tp_setattr */
    0,                           /* tp_as_async */
    0,                           /* tp_repr */
    0,                           /* tp_as_number */
    &ndarray_as_sequence,        /* tp_as_sequence */
    &ndarray_as_mapping,         /* tp_as_mapping */
    (hashfunc)ndarray_hash,      /* tp_hash */
    0,                           /* tp_call */
    0,                           /* tp_str */
    PyObject_GenericGetAttr,     /* tp_getattro */
    0,                           /* tp_setattro */
    &ndarray_as_buffer,          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,          /* tp_flags */
    0,                           /* tp_doc */
    0,                           /* tp_traverse */
    0,                           /* tp_clear */
    0,                           /* tp_richcompare */
    0,                           /* tp_weaklistoffset */
    0,                           /* tp_iter */
    0,                           /* tp_iternext */
    ndarray_methods,             /* tp_methods */
    0,                           /* tp_members */
    ndarray_getset,              /* tp_getset */
    0,                           /* tp_base */
    0,                           /* tp_dict */
    0,                           /* tp_descr_get */
    0,                           /* tp_descr_set */
    0,                           /* tp_dictoffset */
    ndarray_init,                /* tp_init */
    0,                           /* tp_alloc */
    ndarray_new,                 /* tp_new */
};
/**************************************************************************/
/*                          StaticArray Object                            */
/**************************************************************************/

static PyTypeObject StaticArray_Type;

/* Instance layout: there is no per-instance buffer state; every export
   hands out the file-static static_buffer defined below. */
typedef struct {
    PyObject_HEAD
    int legacy_mode; /* if true, use the view.obj==NULL hack */
} StaticArrayObject;
/* Backing storage plus a fixed, read-only, one-dimensional 'B'-format
   Py_buffer over it, shared by every staticarray export. */
static char static_mem[12] = {0,1,2,3,4,5,6,7,8,9,10,11};
static Py_ssize_t static_shape[1] = {12};
static Py_ssize_t static_strides[1] = {1};
static Py_buffer static_buffer = {
    static_mem,     /* buf */
    NULL,           /* obj */
    12,             /* len */
    1,              /* itemsize */
    1,              /* readonly */
    1,              /* ndim */
    "B",            /* format */
    static_shape,   /* shape */
    static_strides, /* strides */
    NULL,           /* suboffsets */
    NULL            /* internal */
};
  2285. static PyObject *
  2286. staticarray_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
  2287. {
  2288. return (PyObject *)PyObject_New(StaticArrayObject, &StaticArray_Type);
  2289. }
  2290. static int
  2291. staticarray_init(PyObject *self, PyObject *args, PyObject *kwds)
  2292. {
  2293. StaticArrayObject *a = (StaticArrayObject *)self;
  2294. static char *kwlist[] = {
  2295. "legacy_mode", NULL
  2296. };
  2297. PyObject *legacy_mode = Py_False;
  2298. if (!PyArg_ParseTupleAndKeywords(args, kwds, "|O", kwlist, &legacy_mode))
  2299. return -1;
  2300. a->legacy_mode = (legacy_mode != Py_False);
  2301. return 0;
  2302. }
static void
staticarray_dealloc(StaticArrayObject *self)
{
    /* The instance holds no owned references, so freeing the object
       memory is sufficient. */
    PyObject_Free(self);
}
  2308. /* Return a buffer for a PyBUF_FULL_RO request. Flags are not checked,
  2309. which makes this object a non-compliant exporter! */
  2310. static int
  2311. staticarray_getbuf(StaticArrayObject *self, Py_buffer *view, int flags)
  2312. {
  2313. *view = static_buffer;
  2314. if (self->legacy_mode) {
  2315. view->obj = NULL; /* Don't use this in new code. */
  2316. }
  2317. else {
  2318. view->obj = (PyObject *)self;
  2319. Py_INCREF(view->obj);
  2320. }
  2321. return 0;
  2322. }
/* Buffer protocol: getbuffer only. No bf_releasebuffer is needed because
   staticarray_getbuf allocates nothing per export. */
static PyBufferProcs staticarray_as_buffer = {
    (getbufferproc)staticarray_getbuf, /* bf_getbuffer */
    NULL,                              /* bf_releasebuffer */
};
/* Statically allocated type object for staticarray; its ob_type slot is
   filled in at module init (see PyInit__testbuffer). */
static PyTypeObject StaticArray_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    "staticarray",                   /* Name of this type */
    sizeof(StaticArrayObject),       /* Basic object size */
    0,                               /* Item size for varobject */
    (destructor)staticarray_dealloc, /* tp_dealloc */
    0,                               /* tp_vectorcall_offset */
    0,                               /* tp_getattr */
    0,                               /* tp_setattr */
    0,                               /* tp_as_async */
    0,                               /* tp_repr */
    0,                               /* tp_as_number */
    0,                               /* tp_as_sequence */
    0,                               /* tp_as_mapping */
    0,                               /* tp_hash */
    0,                               /* tp_call */
    0,                               /* tp_str */
    0,                               /* tp_getattro */
    0,                               /* tp_setattro */
    &staticarray_as_buffer,          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,              /* tp_flags */
    0,                               /* tp_doc */
    0,                               /* tp_traverse */
    0,                               /* tp_clear */
    0,                               /* tp_richcompare */
    0,                               /* tp_weaklistoffset */
    0,                               /* tp_iter */
    0,                               /* tp_iternext */
    0,                               /* tp_methods */
    0,                               /* tp_members */
    0,                               /* tp_getset */
    0,                               /* tp_base */
    0,                               /* tp_dict */
    0,                               /* tp_descr_get */
    0,                               /* tp_descr_set */
    0,                               /* tp_dictoffset */
    staticarray_init,                /* tp_init */
    0,                               /* tp_alloc */
    staticarray_new,                 /* tp_new */
};
/* Module-level helper functions exposed to the test suite. */
static struct PyMethodDef _testbuffer_functions[] = {
    {"slice_indices", slice_indices, METH_VARARGS, NULL},
    {"get_pointer", get_pointer, METH_VARARGS, NULL},
    {"get_sizeof_void_p", get_sizeof_void_p, METH_NOARGS, NULL},
    {"get_contiguous", get_contiguous, METH_VARARGS, NULL},
    {"py_buffer_to_contiguous", py_buffer_to_contiguous, METH_VARARGS, NULL},
    {"is_contiguous", is_contiguous, METH_VARARGS, NULL},
    {"cmp_contig", cmp_contig, METH_VARARGS, NULL},
    {NULL, NULL}
};
/* Module definition. m_size is -1 because the module keeps global state
   (structmodule, Struct, calcsize, simple_format, infobuf). */
static struct PyModuleDef _testbuffermodule = {
    PyModuleDef_HEAD_INIT,
    "_testbuffer",
    NULL,                  /* m_doc */
    -1,                    /* m_size */
    _testbuffer_functions,
    NULL,
    NULL,
    NULL,
    NULL
};
  2388. PyMODINIT_FUNC
  2389. PyInit__testbuffer(void)
  2390. {
  2391. PyObject *m;
  2392. m = PyModule_Create(&_testbuffermodule);
  2393. if (m == NULL)
  2394. return NULL;
  2395. Py_SET_TYPE(&NDArray_Type, &PyType_Type);
  2396. Py_INCREF(&NDArray_Type);
  2397. PyModule_AddObject(m, "ndarray", (PyObject *)&NDArray_Type);
  2398. Py_SET_TYPE(&StaticArray_Type, &PyType_Type);
  2399. Py_INCREF(&StaticArray_Type);
  2400. PyModule_AddObject(m, "staticarray", (PyObject *)&StaticArray_Type);
  2401. structmodule = PyImport_ImportModule("struct");
  2402. if (structmodule == NULL)
  2403. return NULL;
  2404. Struct = PyObject_GetAttrString(structmodule, "Struct");
  2405. calcsize = PyObject_GetAttrString(structmodule, "calcsize");
  2406. if (Struct == NULL || calcsize == NULL)
  2407. return NULL;
  2408. simple_format = PyUnicode_FromString(simple_fmt);
  2409. if (simple_format == NULL)
  2410. return NULL;
  2411. PyModule_AddIntMacro(m, ND_MAX_NDIM);
  2412. PyModule_AddIntMacro(m, ND_VAREXPORT);
  2413. PyModule_AddIntMacro(m, ND_WRITABLE);
  2414. PyModule_AddIntMacro(m, ND_FORTRAN);
  2415. PyModule_AddIntMacro(m, ND_SCALAR);
  2416. PyModule_AddIntMacro(m, ND_PIL);
  2417. PyModule_AddIntMacro(m, ND_GETBUF_FAIL);
  2418. PyModule_AddIntMacro(m, ND_GETBUF_UNDEFINED);
  2419. PyModule_AddIntMacro(m, ND_REDIRECT);
  2420. PyModule_AddIntMacro(m, PyBUF_SIMPLE);
  2421. PyModule_AddIntMacro(m, PyBUF_WRITABLE);
  2422. PyModule_AddIntMacro(m, PyBUF_FORMAT);
  2423. PyModule_AddIntMacro(m, PyBUF_ND);
  2424. PyModule_AddIntMacro(m, PyBUF_STRIDES);
  2425. PyModule_AddIntMacro(m, PyBUF_INDIRECT);
  2426. PyModule_AddIntMacro(m, PyBUF_C_CONTIGUOUS);
  2427. PyModule_AddIntMacro(m, PyBUF_F_CONTIGUOUS);
  2428. PyModule_AddIntMacro(m, PyBUF_ANY_CONTIGUOUS);
  2429. PyModule_AddIntMacro(m, PyBUF_FULL);
  2430. PyModule_AddIntMacro(m, PyBUF_FULL_RO);
  2431. PyModule_AddIntMacro(m, PyBUF_RECORDS);
  2432. PyModule_AddIntMacro(m, PyBUF_RECORDS_RO);
  2433. PyModule_AddIntMacro(m, PyBUF_STRIDED);
  2434. PyModule_AddIntMacro(m, PyBUF_STRIDED_RO);
  2435. PyModule_AddIntMacro(m, PyBUF_CONTIG);
  2436. PyModule_AddIntMacro(m, PyBUF_CONTIG_RO);
  2437. PyModule_AddIntMacro(m, PyBUF_READ);
  2438. PyModule_AddIntMacro(m, PyBUF_WRITE);
  2439. return m;
  2440. }