/* C Extension module to test all aspects of PEP-3118.
   Written by Stefan Krah. */

#define PY_SSIZE_T_CLEAN
#include "Python.h"

/* struct module */
PyObject *structmodule = NULL;
PyObject *Struct = NULL;
PyObject *calcsize = NULL;

/* cache simple format string */
static const char *simple_fmt = "B";
PyObject *simple_format = NULL;
#define SIMPLE_FORMAT(fmt) (fmt == NULL || strcmp(fmt, "B") == 0)
#define FIX_FORMAT(fmt) (fmt == NULL ? "B" : fmt)


/**************************************************************************/
/*                             NDArray Object                             */
/**************************************************************************/

static PyTypeObject NDArray_Type;
#define NDArray_Check(v) (Py_TYPE(v) == &NDArray_Type)

#define CHECK_LIST_OR_TUPLE(v) \
    if (!PyList_Check(v) && !PyTuple_Check(v)) { \
        PyErr_SetString(PyExc_TypeError,         \
            #v " must be a list or a tuple");    \
        return NULL;                             \
    }                                            \

#define PyMem_XFree(v) \
    do { if (v) PyMem_Free(v); } while (0)

/* Maximum number of dimensions. */
#define ND_MAX_NDIM (2 * PyBUF_MAX_NDIM)

/* Check for the presence of suboffsets in the first dimension. */
#define HAVE_PTR(suboffsets) (suboffsets && suboffsets[0] >= 0)
/* Adjust ptr if suboffsets are present. */
#define ADJUST_PTR(ptr, suboffsets) \
    (HAVE_PTR(suboffsets) ? *((char**)ptr) + suboffsets[0] : ptr)

/* Default: NumPy style (strides), read-only, no var-export, C-style layout */
#define ND_DEFAULT          0x000
/* User configurable flags for the ndarray */
#define ND_VAREXPORT        0x001   /* change layout while buffers are exported */
/* User configurable flags for each base buffer */
#define ND_WRITABLE         0x002   /* mark base buffer as writable */
#define ND_FORTRAN          0x004   /* Fortran contiguous layout */
#define ND_SCALAR           0x008   /* scalar: ndim = 0 */
#define ND_PIL              0x010   /* convert to PIL-style array (suboffsets) */
#define ND_REDIRECT         0x020   /* redirect buffer requests */
#define ND_GETBUF_FAIL      0x040   /* trigger getbuffer failure */
#define ND_GETBUF_UNDEFINED 0x080   /* undefined view.obj */
/* Internal flags for the base buffer */
#define ND_C                0x100   /* C contiguous layout (default) */
#define ND_OWN_ARRAYS       0x200   /* consumer owns arrays */

/* ndarray properties */
#define ND_IS_CONSUMER(nd) \
    (((NDArrayObject *)nd)->head == &((NDArrayObject *)nd)->staticbuf)

/* ndbuf->flags properties */
#define ND_C_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_C)))
#define ND_FORTRAN_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_FORTRAN)))
#define ND_ANY_CONTIGUOUS(flags) (!!(flags&(ND_SCALAR|ND_C|ND_FORTRAN)))

/* getbuffer() requests */
#define REQ_INDIRECT(flags) ((flags&PyBUF_INDIRECT) == PyBUF_INDIRECT)
#define REQ_C_CONTIGUOUS(flags) ((flags&PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS)
#define REQ_F_CONTIGUOUS(flags) ((flags&PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS)
#define REQ_ANY_CONTIGUOUS(flags) ((flags&PyBUF_ANY_CONTIGUOUS) == PyBUF_ANY_CONTIGUOUS)
#define REQ_STRIDES(flags) ((flags&PyBUF_STRIDES) == PyBUF_STRIDES)
#define REQ_SHAPE(flags) ((flags&PyBUF_ND) == PyBUF_ND)
#define REQ_WRITABLE(flags) (flags&PyBUF_WRITABLE)
#define REQ_FORMAT(flags) (flags&PyBUF_FORMAT)


/* Single node of a list of base buffers. The list is needed to implement
   changes in memory layout while exported buffers are active. */
static PyTypeObject NDArray_Type;

struct ndbuf;
typedef struct ndbuf {
    struct ndbuf *next;
    struct ndbuf *prev;
    Py_ssize_t len;     /* length of data */
    Py_ssize_t offset;  /* start of the array relative to data */
    char *data;         /* raw data */
    int flags;          /* capabilities of the base buffer */
    Py_ssize_t exports; /* number of exports */
    Py_buffer base;     /* base buffer */
} ndbuf_t;

typedef struct {
    PyObject_HEAD
    int flags;          /* ndarray flags */
    ndbuf_t staticbuf;  /* static buffer for re-exporting mode */
    ndbuf_t *head;      /* currently active base buffer */
} NDArrayObject;


static ndbuf_t *
ndbuf_new(Py_ssize_t nitems, Py_ssize_t itemsize, Py_ssize_t offset, int flags)
{
    ndbuf_t *ndbuf;
    Py_buffer *base;
    Py_ssize_t len;

    len = nitems * itemsize;
    if (offset % itemsize) {
        PyErr_SetString(PyExc_ValueError,
                        "offset must be a multiple of itemsize");
        return NULL;
    }
    if (offset < 0 || offset+itemsize > len) {
        PyErr_SetString(PyExc_ValueError, "offset out of bounds");
        return NULL;
    }

    ndbuf = PyMem_Malloc(sizeof *ndbuf);
    if (ndbuf == NULL) {
        PyErr_NoMemory();
        return NULL;
    }

    ndbuf->next = NULL;
    ndbuf->prev = NULL;
    ndbuf->len = len;
    ndbuf->offset = offset;

    ndbuf->data = PyMem_Malloc(len);
    if (ndbuf->data == NULL) {
        PyErr_NoMemory();
        PyMem_Free(ndbuf);
        return NULL;
    }

    ndbuf->flags = flags;
    ndbuf->exports = 0;

    base = &ndbuf->base;
    base->obj = NULL;
    base->buf = ndbuf->data;
    base->len = len;
    base->itemsize = 1;
    base->readonly = 0;
    base->format = NULL;
    base->ndim = 1;
    base->shape = NULL;
    base->strides = NULL;
    base->suboffsets = NULL;
    base->internal = ndbuf;

    return ndbuf;
}
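
/* Example (hypothetical values, for orientation only): a call like
   ndbuf_new(6, 2, 4, ND_WRITABLE) allocates 6*2 = 12 bytes of raw data and
   records offset 4, so the exported array starts at data+4. The offset must
   be a multiple of itemsize (4 % 2 == 0) and offset+itemsize must not exceed
   len (4+2 <= 12); otherwise a ValueError is set and NULL is returned. */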
static void
ndbuf_free(ndbuf_t *ndbuf)
{
    Py_buffer *base = &ndbuf->base;

    PyMem_XFree(ndbuf->data);
    PyMem_XFree(base->format);
    PyMem_XFree(base->shape);
    PyMem_XFree(base->strides);
    PyMem_XFree(base->suboffsets);

    PyMem_Free(ndbuf);
}

static void
ndbuf_push(NDArrayObject *nd, ndbuf_t *elt)
{
    elt->next = nd->head;
    if (nd->head) nd->head->prev = elt;
    nd->head = elt;
    elt->prev = NULL;
}

static void
ndbuf_delete(NDArrayObject *nd, ndbuf_t *elt)
{
    if (elt->prev)
        elt->prev->next = elt->next;
    else
        nd->head = elt->next;

    if (elt->next)
        elt->next->prev = elt->prev;

    ndbuf_free(elt);
}

static void
ndbuf_pop(NDArrayObject *nd)
{
    ndbuf_delete(nd, nd->head);
}


static PyObject *
ndarray_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    NDArrayObject *nd;

    nd = PyObject_New(NDArrayObject, &NDArray_Type);
    if (nd == NULL)
        return NULL;

    nd->flags = 0;
    nd->head = NULL;
    return (PyObject *)nd;
}

static void
ndarray_dealloc(NDArrayObject *self)
{
    if (self->head) {
        if (ND_IS_CONSUMER(self)) {
            Py_buffer *base = &self->head->base;
            if (self->head->flags & ND_OWN_ARRAYS) {
                PyMem_XFree(base->shape);
                PyMem_XFree(base->strides);
                PyMem_XFree(base->suboffsets);
            }
            PyBuffer_Release(base);
        }
        else {
            while (self->head)
                ndbuf_pop(self);
        }
    }
    PyObject_Del(self);
}

static int
ndarray_init_staticbuf(PyObject *exporter, NDArrayObject *nd, int flags)
{
    Py_buffer *base = &nd->staticbuf.base;

    if (PyObject_GetBuffer(exporter, base, flags) < 0)
        return -1;

    nd->head = &nd->staticbuf;

    nd->head->next = NULL;
    nd->head->prev = NULL;
    nd->head->len = -1;
    nd->head->offset = -1;
    nd->head->data = NULL;

    nd->head->flags = base->readonly ? 0 : ND_WRITABLE;
    nd->head->exports = 0;

    return 0;
}

static void
init_flags(ndbuf_t *ndbuf)
{
    if (ndbuf->base.ndim == 0)
        ndbuf->flags |= ND_SCALAR;
    if (ndbuf->base.suboffsets)
        ndbuf->flags |= ND_PIL;
    if (PyBuffer_IsContiguous(&ndbuf->base, 'C'))
        ndbuf->flags |= ND_C;
    if (PyBuffer_IsContiguous(&ndbuf->base, 'F'))
        ndbuf->flags |= ND_FORTRAN;
}


/****************************************************************************/
/*                          Buffer/List conversions                         */
/****************************************************************************/

static Py_ssize_t *strides_from_shape(const ndbuf_t *, int flags);

/* Get number of members in a struct: see issue #12740 */
typedef struct {
    PyObject_HEAD
    Py_ssize_t s_size;
    Py_ssize_t s_len;
} PyPartialStructObject;

static Py_ssize_t
get_nmemb(PyObject *s)
{
    return ((PyPartialStructObject *)s)->s_len;
}

/* Pack all items into the buffer of 'obj'. The 'format' parameter must be
   in struct module syntax. For standard C types, a single item is an integer.
   For compound types, a single item is a tuple of integers. */
static int
pack_from_list(PyObject *obj, PyObject *items, PyObject *format,
               Py_ssize_t itemsize)
{
    PyObject *structobj, *pack_into;
    PyObject *args, *offset;
    PyObject *item, *tmp;
    Py_ssize_t nitems; /* number of items */
    Py_ssize_t nmemb;  /* number of members in a single item */
    Py_ssize_t i, j;
    int ret = 0;

    assert(PyObject_CheckBuffer(obj));
    assert(PyList_Check(items) || PyTuple_Check(items));

    structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
    if (structobj == NULL)
        return -1;

    nitems = PySequence_Fast_GET_SIZE(items);
    nmemb = get_nmemb(structobj);
    assert(nmemb >= 1);

    pack_into = PyObject_GetAttrString(structobj, "pack_into");
    if (pack_into == NULL) {
        Py_DECREF(structobj);
        return -1;
    }

    /* nmemb >= 1 */
    args = PyTuple_New(2 + nmemb);
    if (args == NULL) {
        Py_DECREF(pack_into);
        Py_DECREF(structobj);
        return -1;
    }

    offset = NULL;
    for (i = 0; i < nitems; i++) {
        /* Loop invariant: args[j] are borrowed references or NULL. */
        PyTuple_SET_ITEM(args, 0, obj);
        for (j = 1; j < 2+nmemb; j++)
            PyTuple_SET_ITEM(args, j, NULL);

        Py_XDECREF(offset);
        offset = PyLong_FromSsize_t(i*itemsize);
        if (offset == NULL) {
            ret = -1;
            break;
        }
        PyTuple_SET_ITEM(args, 1, offset);

        item = PySequence_Fast_GET_ITEM(items, i);
        if ((PyBytes_Check(item) || PyLong_Check(item) ||
             PyFloat_Check(item)) && nmemb == 1) {
            PyTuple_SET_ITEM(args, 2, item);
        }
        else if ((PyList_Check(item) || PyTuple_Check(item)) &&
                 PySequence_Length(item) == nmemb) {
            for (j = 0; j < nmemb; j++) {
                tmp = PySequence_Fast_GET_ITEM(item, j);
                PyTuple_SET_ITEM(args, 2+j, tmp);
            }
        }
        else {
            PyErr_SetString(PyExc_ValueError,
                "mismatch between initializer element and format string");
            ret = -1;
            break;
        }

        tmp = PyObject_CallObject(pack_into, args);
        if (tmp == NULL) {
            ret = -1;
            break;
        }
        Py_DECREF(tmp);
    }

    Py_INCREF(obj); /* args[0] */
    /* args[1]: offset is either NULL or should be dealloc'd */
    for (i = 2; i < 2+nmemb; i++) {
        tmp = PyTuple_GET_ITEM(args, i);
        Py_XINCREF(tmp);
    }
    Py_DECREF(args);

    Py_DECREF(pack_into);
    Py_DECREF(structobj);
    return ret;
}

/* Pack single element */
static int
pack_single(char *ptr, PyObject *item, const char *fmt, Py_ssize_t itemsize)
{
    PyObject *structobj = NULL, *pack_into = NULL, *args = NULL;
    PyObject *format = NULL, *mview = NULL, *zero = NULL;
    Py_ssize_t i, nmemb;
    int ret = -1;
    PyObject *x;

    if (fmt == NULL) fmt = "B";

    format = PyUnicode_FromString(fmt);
    if (format == NULL)
        goto out;

    structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
    if (structobj == NULL)
        goto out;

    nmemb = get_nmemb(structobj);
    assert(nmemb >= 1);

    mview = PyMemoryView_FromMemory(ptr, itemsize, PyBUF_WRITE);
    if (mview == NULL)
        goto out;

    zero = PyLong_FromLong(0);
    if (zero == NULL)
        goto out;

    pack_into = PyObject_GetAttrString(structobj, "pack_into");
    if (pack_into == NULL)
        goto out;

    args = PyTuple_New(2+nmemb);
    if (args == NULL)
        goto out;

    PyTuple_SET_ITEM(args, 0, mview);
    PyTuple_SET_ITEM(args, 1, zero);

    if ((PyBytes_Check(item) || PyLong_Check(item) ||
         PyFloat_Check(item)) && nmemb == 1) {
        PyTuple_SET_ITEM(args, 2, item);
    }
    else if ((PyList_Check(item) || PyTuple_Check(item)) &&
             PySequence_Length(item) == nmemb) {
        for (i = 0; i < nmemb; i++) {
            x = PySequence_Fast_GET_ITEM(item, i);
            PyTuple_SET_ITEM(args, 2+i, x);
        }
    }
    else {
        PyErr_SetString(PyExc_ValueError,
            "mismatch between initializer element and format string");
        goto args_out;
    }

    x = PyObject_CallObject(pack_into, args);
    if (x != NULL) {
        Py_DECREF(x);
        ret = 0;
    }

args_out:
    for (i = 0; i < 2+nmemb; i++)
        Py_XINCREF(PyTuple_GET_ITEM(args, i));
    Py_XDECREF(args);
out:
    Py_XDECREF(pack_into);
    Py_XDECREF(zero);
    Py_XDECREF(mview);
    Py_XDECREF(structobj);
    Py_XDECREF(format);
    return ret;
}

static void
copy_rec(const Py_ssize_t *shape, Py_ssize_t ndim, Py_ssize_t itemsize,
         char *dptr, const Py_ssize_t *dstrides, const Py_ssize_t *dsuboffsets,
         char *sptr, const Py_ssize_t *sstrides, const Py_ssize_t *ssuboffsets,
         char *mem)
{
    Py_ssize_t i;

    assert(ndim >= 1);

    if (ndim == 1) {
        if (!HAVE_PTR(dsuboffsets) && !HAVE_PTR(ssuboffsets) &&
            dstrides[0] == itemsize && sstrides[0] == itemsize) {
            memmove(dptr, sptr, shape[0] * itemsize);
        }
        else {
            char *p;
            assert(mem != NULL);
            for (i=0, p=mem; i<shape[0]; p+=itemsize, sptr+=sstrides[0], i++) {
                char *xsptr = ADJUST_PTR(sptr, ssuboffsets);
                memcpy(p, xsptr, itemsize);
            }
            for (i=0, p=mem; i<shape[0]; p+=itemsize, dptr+=dstrides[0], i++) {
                char *xdptr = ADJUST_PTR(dptr, dsuboffsets);
                memcpy(xdptr, p, itemsize);
            }
        }
        return;
    }

    for (i = 0; i < shape[0]; dptr+=dstrides[0], sptr+=sstrides[0], i++) {
        char *xdptr = ADJUST_PTR(dptr, dsuboffsets);
        char *xsptr = ADJUST_PTR(sptr, ssuboffsets);

        copy_rec(shape+1, ndim-1, itemsize,
                 xdptr, dstrides+1, dsuboffsets ? dsuboffsets+1 : NULL,
                 xsptr, sstrides+1, ssuboffsets ? ssuboffsets+1 : NULL,
                 mem);
    }
}

static int
cmp_structure(Py_buffer *dest, Py_buffer *src)
{
    Py_ssize_t i;

    if (strcmp(FIX_FORMAT(dest->format), FIX_FORMAT(src->format)) != 0 ||
        dest->itemsize != src->itemsize ||
        dest->ndim != src->ndim)
        return -1;

    for (i = 0; i < dest->ndim; i++) {
        if (dest->shape[i] != src->shape[i])
            return -1;
        if (dest->shape[i] == 0)
            break;
    }

    return 0;
}

/* Copy src to dest. Both buffers must have the same format, itemsize,
   ndim and shape. Copying is atomic, the function never fails with
   a partial copy. */
static int
copy_buffer(Py_buffer *dest, Py_buffer *src)
{
    char *mem = NULL;

    assert(dest->ndim > 0);

    if (cmp_structure(dest, src) < 0) {
        PyErr_SetString(PyExc_ValueError,
            "ndarray assignment: lvalue and rvalue have different structures");
        return -1;
    }

    if ((dest->suboffsets && dest->suboffsets[dest->ndim-1] >= 0) ||
        (src->suboffsets && src->suboffsets[src->ndim-1] >= 0) ||
        dest->strides[dest->ndim-1] != dest->itemsize ||
        src->strides[src->ndim-1] != src->itemsize) {
        mem = PyMem_Malloc(dest->shape[dest->ndim-1] * dest->itemsize);
        if (mem == NULL) {
            PyErr_NoMemory();
            return -1;
        }
    }

    copy_rec(dest->shape, dest->ndim, dest->itemsize,
             dest->buf, dest->strides, dest->suboffsets,
             src->buf, src->strides, src->suboffsets,
             mem);

    PyMem_XFree(mem);
    return 0;
}

/* Unpack single element */
static PyObject *
unpack_single(char *ptr, const char *fmt, Py_ssize_t itemsize)
{
    PyObject *x, *unpack_from, *mview;

    if (fmt == NULL) {
        fmt = "B";
        itemsize = 1;
    }

    unpack_from = PyObject_GetAttrString(structmodule, "unpack_from");
    if (unpack_from == NULL)
        return NULL;

    mview = PyMemoryView_FromMemory(ptr, itemsize, PyBUF_READ);
    if (mview == NULL) {
        Py_DECREF(unpack_from);
        return NULL;
    }

    x = PyObject_CallFunction(unpack_from, "sO", fmt, mview);
    Py_DECREF(unpack_from);
    Py_DECREF(mview);
    if (x == NULL)
        return NULL;

    if (PyTuple_GET_SIZE(x) == 1) {
        PyObject *tmp = PyTuple_GET_ITEM(x, 0);
        Py_INCREF(tmp);
        Py_DECREF(x);
        return tmp;
    }

    return x;
}

/* Unpack a multi-dimensional matrix into a nested list. Return a scalar
   for ndim = 0. */
static PyObject *
unpack_rec(PyObject *unpack_from, char *ptr, PyObject *mview, char *item,
           const Py_ssize_t *shape, const Py_ssize_t *strides,
           const Py_ssize_t *suboffsets, Py_ssize_t ndim, Py_ssize_t itemsize)
{
    PyObject *lst, *x;
    Py_ssize_t i;

    assert(ndim >= 0);
    assert(shape != NULL);
    assert(strides != NULL);

    if (ndim == 0) {
        memcpy(item, ptr, itemsize);
        x = PyObject_CallFunctionObjArgs(unpack_from, mview, NULL);
        if (x == NULL)
            return NULL;
        if (PyTuple_GET_SIZE(x) == 1) {
            PyObject *tmp = PyTuple_GET_ITEM(x, 0);
            Py_INCREF(tmp);
            Py_DECREF(x);
            return tmp;
        }
        return x;
    }

    lst = PyList_New(shape[0]);
    if (lst == NULL)
        return NULL;

    for (i = 0; i < shape[0]; ptr+=strides[0], i++) {
        char *nextptr = ADJUST_PTR(ptr, suboffsets);

        x = unpack_rec(unpack_from, nextptr, mview, item,
                       shape+1, strides+1, suboffsets ? suboffsets+1 : NULL,
                       ndim-1, itemsize);
        if (x == NULL) {
            Py_DECREF(lst);
            return NULL;
        }

        PyList_SET_ITEM(lst, i, x);
    }

    return lst;
}

static PyObject *
ndarray_as_list(NDArrayObject *nd)
{
    PyObject *structobj = NULL, *unpack_from = NULL;
    PyObject *lst = NULL, *mview = NULL;
    Py_buffer *base = &nd->head->base;
    Py_ssize_t *shape = base->shape;
    Py_ssize_t *strides = base->strides;
    Py_ssize_t simple_shape[1];
    Py_ssize_t simple_strides[1];
    char *item = NULL;
    PyObject *format;
    char *fmt = base->format;

    base = &nd->head->base;

    if (fmt == NULL) {
        PyErr_SetString(PyExc_ValueError,
            "ndarray: tolist() does not support format=NULL, use "
            "tobytes()");
        return NULL;
    }
    if (shape == NULL) {
        assert(ND_C_CONTIGUOUS(nd->head->flags));
        assert(base->strides == NULL);
        assert(base->ndim <= 1);
        shape = simple_shape;
        shape[0] = base->len;
        strides = simple_strides;
        strides[0] = base->itemsize;
    }
    else if (strides == NULL) {
        assert(ND_C_CONTIGUOUS(nd->head->flags));
        strides = strides_from_shape(nd->head, 0);
        if (strides == NULL)
            return NULL;
    }

    format = PyUnicode_FromString(fmt);
    if (format == NULL)
        goto out;

    structobj = PyObject_CallFunctionObjArgs(Struct, format, NULL);
    Py_DECREF(format);
    if (structobj == NULL)
        goto out;

    unpack_from = PyObject_GetAttrString(structobj, "unpack_from");
    if (unpack_from == NULL)
        goto out;

    item = PyMem_Malloc(base->itemsize);
    if (item == NULL) {
        PyErr_NoMemory();
        goto out;
    }

    mview = PyMemoryView_FromMemory(item, base->itemsize, PyBUF_WRITE);
    if (mview == NULL)
        goto out;

    lst = unpack_rec(unpack_from, base->buf, mview, item,
                     shape, strides, base->suboffsets,
                     base->ndim, base->itemsize);

out:
    Py_XDECREF(mview);
    PyMem_XFree(item);
    Py_XDECREF(unpack_from);
    Py_XDECREF(structobj);
    if (strides != base->strides && strides != simple_strides)
        PyMem_XFree(strides);

    return lst;
}


/****************************************************************************/
/*                             Initialize ndbuf                             */
/****************************************************************************/

/*
   State of a new ndbuf during initialization. 'OK' means that initialization
   is complete. 'PTR' means that a pointer has been initialized, but the
   state of the memory is still undefined and ndbuf->offset is disregarded.

   +-----------------+-----------+-------------+----------------+
   |                 | ndbuf_new | init_simple | init_structure |
   +-----------------+-----------+-------------+----------------+
   | next            | OK (NULL) |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | prev            | OK (NULL) |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | len             |    OK     |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | offset          |    OK     |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | data            |    PTR    |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | flags           |   user    |    user     |       OK       |
   +-----------------+-----------+-------------+----------------+
   | exports         |  OK (0)   |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.obj        | OK (NULL) |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.buf        |    PTR    |     PTR     |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.len        | len(data) |  len(data)  |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.itemsize   |     1     |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.readonly   |     0     |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.format     |   NULL    |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.ndim       |     1     |      1      |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.shape      |   NULL    |    NULL     |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.strides    |   NULL    |    NULL     |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.suboffsets |   NULL    |    NULL     |       OK       |
   +-----------------+-----------+-------------+----------------+
   | base.internal   |    OK     |     OK      |       OK       |
   +-----------------+-----------+-------------+----------------+
*/

static Py_ssize_t
get_itemsize(PyObject *format)
{
    PyObject *tmp;
    Py_ssize_t itemsize;

    tmp = PyObject_CallFunctionObjArgs(calcsize, format, NULL);
    if (tmp == NULL)
        return -1;
    itemsize = PyLong_AsSsize_t(tmp);
    Py_DECREF(tmp);

    return itemsize;
}

static char *
get_format(PyObject *format)
{
    PyObject *tmp;
    char *fmt;

    tmp = PyUnicode_AsASCIIString(format);
    if (tmp == NULL)
        return NULL;
    fmt = PyMem_Malloc(PyBytes_GET_SIZE(tmp)+1);
    if (fmt == NULL) {
        PyErr_NoMemory();
        Py_DECREF(tmp);
        return NULL;
    }
    strcpy(fmt, PyBytes_AS_STRING(tmp));
    Py_DECREF(tmp);

    return fmt;
}

static int
init_simple(ndbuf_t *ndbuf, PyObject *items, PyObject *format,
            Py_ssize_t itemsize)
{
    PyObject *mview;
    Py_buffer *base = &ndbuf->base;
    int ret;

    mview = PyMemoryView_FromBuffer(base);
    if (mview == NULL)
        return -1;

    ret = pack_from_list(mview, items, format, itemsize);
    Py_DECREF(mview);
    if (ret < 0)
        return -1;

    base->readonly = !(ndbuf->flags & ND_WRITABLE);
    base->itemsize = itemsize;
    base->format = get_format(format);
    if (base->format == NULL)
        return -1;

    return 0;
}

static Py_ssize_t *
seq_as_ssize_array(PyObject *seq, Py_ssize_t len, int is_shape)
{
    Py_ssize_t *dest;
    Py_ssize_t x, i;

    /* ndim = len <= ND_MAX_NDIM, so PyMem_New() is actually not needed. */
    dest = PyMem_New(Py_ssize_t, len);
    if (dest == NULL) {
        PyErr_NoMemory();
        return NULL;
    }

    for (i = 0; i < len; i++) {
        PyObject *tmp = PySequence_Fast_GET_ITEM(seq, i);
        if (!PyLong_Check(tmp)) {
            PyErr_Format(PyExc_ValueError,
                "elements of %s must be integers",
                is_shape ? "shape" : "strides");
            PyMem_Free(dest);
            return NULL;
        }
        x = PyLong_AsSsize_t(tmp);
        if (PyErr_Occurred()) {
            PyMem_Free(dest);
            return NULL;
        }
        if (is_shape && x < 0) {
            PyErr_Format(PyExc_ValueError,
                "elements of shape must be integers >= 0");
            PyMem_Free(dest);
            return NULL;
        }
        dest[i] = x;
    }

    return dest;
}

static Py_ssize_t *
strides_from_shape(const ndbuf_t *ndbuf, int flags)
{
    const Py_buffer *base = &ndbuf->base;
    Py_ssize_t *s, i;

    s = PyMem_Malloc(base->ndim * (sizeof *s));
    if (s == NULL) {
        PyErr_NoMemory();
        return NULL;
    }

    if (flags & ND_FORTRAN) {
        s[0] = base->itemsize;
        for (i = 1; i < base->ndim; i++)
            s[i] = s[i-1] * base->shape[i-1];
    }
    else {
        s[base->ndim-1] = base->itemsize;
        for (i = base->ndim-2; i >= 0; i--)
            s[i] = s[i+1] * base->shape[i+1];
    }

    return s;
}
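
/* Worked example (hypothetical shape, not taken from a test): for
   shape = {2, 3, 4} and itemsize = 1, the C-contiguous branch yields
   strides = {12, 4, 1} (s[2] = 1, s[1] = 1*4, s[0] = 4*3), while the
   ND_FORTRAN branch yields strides = {1, 2, 6}
   (s[0] = 1, s[1] = 1*2, s[2] = 2*3). */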
/* Bounds check:

     len := complete length of allocated memory
     offset := start of the array

   A single array element is indexed by:

     i = indices[0] * strides[0] + indices[1] * strides[1] + ...

   imin is reached when all indices[n] combined with positive strides are 0
   and all indices combined with negative strides are shape[n]-1, which is
   the maximum index for the nth dimension.

   imax is reached when all indices[n] combined with negative strides are 0
   and all indices combined with positive strides are shape[n]-1.
*/
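
/* Worked example, using the non-contiguous layout from the conversion
   comment further below (len = 12, itemsize = 1, offset = 8,
   shape = {2, 2, 3}, strides = {-6, 3, -1}):

     imin = (2-1)*(-6) + (3-1)*(-1) = -8   (negative strides)
     imax = (2-1)*3                 =  3   (positive strides)

   verify_structure() accepts this because imin + offset = 0 >= 0 and
   imax + offset + itemsize = 12 <= len. */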
static int
verify_structure(Py_ssize_t len, Py_ssize_t itemsize, Py_ssize_t offset,
                 const Py_ssize_t *shape, const Py_ssize_t *strides,
                 Py_ssize_t ndim)
{
    Py_ssize_t imin, imax;
    Py_ssize_t n;

    assert(ndim >= 0);

    if (ndim == 0 && (offset < 0 || offset+itemsize > len))
        goto invalid_combination;

    for (n = 0; n < ndim; n++)
        if (strides[n] % itemsize) {
            PyErr_SetString(PyExc_ValueError,
                "strides must be a multiple of itemsize");
            return -1;
        }

    for (n = 0; n < ndim; n++)
        if (shape[n] == 0)
            return 0;

    imin = imax = 0;
    for (n = 0; n < ndim; n++)
        if (strides[n] <= 0)
            imin += (shape[n]-1) * strides[n];
        else
            imax += (shape[n]-1) * strides[n];

    if (imin + offset < 0 || imax + offset + itemsize > len)
        goto invalid_combination;

    return 0;

invalid_combination:
    PyErr_SetString(PyExc_ValueError,
        "invalid combination of buffer, shape and strides");
    return -1;
}

/*
   Convert a NumPy-style array to an array using suboffsets to stride in
   the first dimension. Requirements: ndim > 0.

   Contiguous example
   ==================

     Input:
     ------
       shape      = {2, 2, 3};
       strides    = {6, 3, 1};
       suboffsets = NULL;
       data       = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
       buf        = &data[0]

     Output:
     -------
       shape      = {2, 2, 3};
       strides    = {sizeof(char *), 3, 1};
       suboffsets = {0, -1, -1};
       data       = {p1, p2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
                     |   |   ^                 ^
                     `---'---'                 |
                         |                     |
                         `---------------------'
       buf        = &data[0]

     So, in the example the input resembles the three-dimensional array
     char v[2][2][3], while the output resembles an array of two pointers
     to two-dimensional arrays: char (*v[2])[2][3].

   Non-contiguous example:
   =======================

     Input (with offset and negative strides):
     -----------------------------------------
       shape      = {2, 2, 3};
       strides    = {-6, 3, -1};
       offset     = 8
       suboffsets = NULL;
       data       = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};

     Output:
     -------
       shape      = {2, 2, 3};
       strides    = {-sizeof(char *), 3, -1};
       suboffsets = {2, -1, -1};
       newdata    = {p1, p2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
                     |   |   ^     ^           ^     ^
                     `---'---'     |           |     `- p2+suboffsets[0]
                         |         `-----------|------ p1+suboffsets[0]
                         `---------------------'
       buf        = &newdata[1]  # striding backwards over the pointers.

   suboffsets[0] is the same as the offset that one would specify if
   the two {2, 3} subarrays were created directly, hence the name.
*/
static int
init_suboffsets(ndbuf_t *ndbuf)
{
    Py_buffer *base = &ndbuf->base;
    Py_ssize_t start, step;
    Py_ssize_t imin, suboffset0;
    Py_ssize_t addsize;
    Py_ssize_t n;
    char *data;

    assert(base->ndim > 0);
    assert(base->suboffsets == NULL);

    /* Allocate new data with additional space for shape[0] pointers. */
    addsize = base->shape[0] * (sizeof (char *));

    /* Align array start to a multiple of 8. */
    addsize = 8 * ((addsize + 7) / 8);

    data = PyMem_Malloc(ndbuf->len + addsize);
    if (data == NULL) {
        PyErr_NoMemory();
        return -1;
    }

    memcpy(data + addsize, ndbuf->data, ndbuf->len);

    PyMem_Free(ndbuf->data);
    ndbuf->data = data;
    ndbuf->len += addsize;
    base->buf = ndbuf->data;

    /* imin: minimum index of the input array relative to ndbuf->offset.
       suboffset0: offset for each sub-array of the output. This is the
       same as calculating -imin' for a sub-array of ndim-1. */
    imin = suboffset0 = 0;
    for (n = 0; n < base->ndim; n++) {
        if (base->shape[n] == 0)
            break;
        if (base->strides[n] <= 0) {
            Py_ssize_t x = (base->shape[n]-1) * base->strides[n];
            imin += x;
            suboffset0 += (n >= 1) ? -x : 0;
        }
    }

    /* Initialize the array of pointers to the sub-arrays. */
    start = addsize + ndbuf->offset + imin;
    step = base->strides[0] < 0 ? -base->strides[0] : base->strides[0];

    for (n = 0; n < base->shape[0]; n++)
        ((char **)base->buf)[n] = (char *)base->buf + start + n*step;

    /* Initialize suboffsets. */
    base->suboffsets = PyMem_Malloc(base->ndim * (sizeof *base->suboffsets));
    if (base->suboffsets == NULL) {
        PyErr_NoMemory();
        return -1;
    }
    base->suboffsets[0] = suboffset0;
    for (n = 1; n < base->ndim; n++)
        base->suboffsets[n] = -1;

    /* Adjust strides for the first (zeroth) dimension. */
    if (base->strides[0] >= 0) {
        base->strides[0] = sizeof(char *);
    }
    else {
        /* Striding backwards. */
        base->strides[0] = -(Py_ssize_t)sizeof(char *);
        if (base->shape[0] > 0)
            base->buf = (char *)base->buf + (base->shape[0]-1) * sizeof(char *);
    }

    ndbuf->flags &= ~(ND_C|ND_FORTRAN);
    ndbuf->offset = 0;
    return 0;
}

static void
init_len(Py_buffer *base)
{
    Py_ssize_t i;

    base->len = 1;
    for (i = 0; i < base->ndim; i++)
        base->len *= base->shape[i];

    base->len *= base->itemsize;
}

static int
init_structure(ndbuf_t *ndbuf, PyObject *shape, PyObject *strides,
               Py_ssize_t ndim)
{
    Py_buffer *base = &ndbuf->base;

    base->ndim = (int)ndim;
    if (ndim == 0) {
        if (ndbuf->flags & ND_PIL) {
            PyErr_SetString(PyExc_TypeError,
                "ndim = 0 cannot be used in conjunction with ND_PIL");
            return -1;
        }
        ndbuf->flags |= (ND_SCALAR|ND_C|ND_FORTRAN);
        return 0;
    }

    /* shape */
    base->shape = seq_as_ssize_array(shape, ndim, 1);
    if (base->shape == NULL)
        return -1;

    /* strides */
    if (strides) {
        base->strides = seq_as_ssize_array(strides, ndim, 0);
    }
    else {
        base->strides = strides_from_shape(ndbuf, ndbuf->flags);
    }
    if (base->strides == NULL)
        return -1;
    if (verify_structure(base->len, base->itemsize, ndbuf->offset,
                         base->shape, base->strides, ndim) < 0)
        return -1;

    /* buf */
    base->buf = ndbuf->data + ndbuf->offset;

    /* len */
    init_len(base);

    /* ndbuf->flags */
    if (PyBuffer_IsContiguous(base, 'C'))
        ndbuf->flags |= ND_C;
    if (PyBuffer_IsContiguous(base, 'F'))
        ndbuf->flags |= ND_FORTRAN;

    /* convert numpy array to suboffset representation */
    if (ndbuf->flags & ND_PIL) {
        /* modifies base->buf, base->strides and base->suboffsets */
        return init_suboffsets(ndbuf);
    }

    return 0;
}

static ndbuf_t *
init_ndbuf(PyObject *items, PyObject *shape, PyObject *strides,
           Py_ssize_t offset, PyObject *format, int flags)
{
    ndbuf_t *ndbuf;
    Py_ssize_t ndim;
    Py_ssize_t nitems;
    Py_ssize_t itemsize;

    /* ndim = len(shape) */
    CHECK_LIST_OR_TUPLE(shape)
    ndim = PySequence_Fast_GET_SIZE(shape);
    if (ndim > ND_MAX_NDIM) {
        PyErr_Format(PyExc_ValueError,
            "ndim must not exceed %d", ND_MAX_NDIM);
        return NULL;
    }

    /* len(strides) = len(shape) */
    if (strides) {
        CHECK_LIST_OR_TUPLE(strides)
        if (PySequence_Fast_GET_SIZE(strides) == 0)
            strides = NULL;
        else if (flags & ND_FORTRAN) {
            PyErr_SetString(PyExc_TypeError,
                "ND_FORTRAN cannot be used together with strides");
            return NULL;
        }
        else if (PySequence_Fast_GET_SIZE(strides) != ndim) {
            PyErr_SetString(PyExc_ValueError,
                "len(shape) != len(strides)");
            return NULL;
        }
    }

    /* itemsize */
    itemsize = get_itemsize(format);
    if (itemsize <= 0) {
        if (itemsize == 0) {
            PyErr_SetString(PyExc_ValueError,
                "itemsize must not be zero");
        }
        return NULL;
    }

    /* convert scalar to list */
    if (ndim == 0) {
        items = Py_BuildValue("(O)", items);
        if (items == NULL)
            return NULL;
    }
    else {
        CHECK_LIST_OR_TUPLE(items)
        Py_INCREF(items);
    }

    /* number of items */
    nitems = PySequence_Fast_GET_SIZE(items);
    if (nitems == 0) {
        PyErr_SetString(PyExc_ValueError,
            "initializer list or tuple must not be empty");
        Py_DECREF(items);
        return NULL;
    }

    ndbuf = ndbuf_new(nitems, itemsize, offset, flags);
    if (ndbuf == NULL) {
        Py_DECREF(items);
        return NULL;
    }

    if (init_simple(ndbuf, items, format, itemsize) < 0)
        goto error;
    if (init_structure(ndbuf, shape, strides, ndim) < 0)
        goto error;

    Py_DECREF(items);
    return ndbuf;

error:
    Py_DECREF(items);
    ndbuf_free(ndbuf);
    return NULL;
}

/* initialize and push a new base onto the linked list */
static int
ndarray_push_base(NDArrayObject *nd, PyObject *items,
                  PyObject *shape, PyObject *strides,
                  Py_ssize_t offset, PyObject *format, int flags)
{
    ndbuf_t *ndbuf;

    ndbuf = init_ndbuf(items, shape, strides, offset, format, flags);
    if (ndbuf == NULL)
        return -1;

    ndbuf_push(nd, ndbuf);
    return 0;
}

#define PyBUF_UNUSED 0x10000
static int
ndarray_init(PyObject *self, PyObject *args, PyObject *kwds)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    static char *kwlist[] = {
        "obj", "shape", "strides", "offset", "format", "flags", "getbuf", NULL
    };
    PyObject *v = NULL;       /* initializer: scalar, list, tuple or base object */
    PyObject *shape = NULL;   /* size of each dimension */
    PyObject *strides = NULL; /* number of bytes to the next elt in each dim */
    Py_ssize_t offset = 0;            /* buffer offset */
    PyObject *format = simple_format; /* struct module specifier: "B" */
    int flags = ND_DEFAULT;           /* base buffer and ndarray flags */
    int getbuf = PyBUF_UNUSED;        /* re-exporter: getbuffer request flags */

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|OOnOii", kwlist,
            &v, &shape, &strides, &offset, &format, &flags, &getbuf))
        return -1;

    /* NDArrayObject is re-exporter */
    if (PyObject_CheckBuffer(v) && shape == NULL) {
        if (strides || offset || format != simple_format ||
            !(flags == ND_DEFAULT || flags == ND_REDIRECT)) {
            PyErr_SetString(PyExc_TypeError,
                "construction from exporter object only takes 'obj', 'getbuf' "
                "and 'flags' arguments");
            return -1;
        }

        getbuf = (getbuf == PyBUF_UNUSED) ? PyBUF_FULL_RO : getbuf;

        if (ndarray_init_staticbuf(v, nd, getbuf) < 0)
            return -1;

        init_flags(nd->head);
        nd->head->flags |= flags;

        return 0;
    }

    /* NDArrayObject is the original base object. */
    if (getbuf != PyBUF_UNUSED) {
        PyErr_SetString(PyExc_TypeError,
            "getbuf argument only valid for construction from exporter "
            "object");
        return -1;
    }
    if (shape == NULL) {
        PyErr_SetString(PyExc_TypeError,
            "shape is a required argument when constructing from "
            "list, tuple or scalar");
        return -1;
    }

    if (flags & ND_VAREXPORT) {
        nd->flags |= ND_VAREXPORT;
        flags &= ~ND_VAREXPORT;
    }

    /* Initialize and push the first base buffer onto the linked list. */
    return ndarray_push_base(nd, v, shape, strides, offset, format, flags);
}
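
/* For orientation only (illustrative Python-level calls, assuming the flag
   and request constants are exposed by the module):

     ndarray([0, 1, 2, 3, 4, 5], shape=[2, 3], format="B",
             flags=ND_WRITABLE)                # original base object
     ndarray(exporter, getbuf=PyBUF_FULL_RO)   # re-exporter path above

   The first form goes through ndarray_push_base(); the second wraps an
   existing buffer provider via ndarray_init_staticbuf(). */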
/* Push an additional base onto the linked list. */
static PyObject *
ndarray_push(PyObject *self, PyObject *args, PyObject *kwds)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    static char *kwlist[] = {
        "items", "shape", "strides", "offset", "format", "flags", NULL
    };
    PyObject *items = NULL;   /* initializer: scalar, list or tuple */
    PyObject *shape = NULL;   /* size of each dimension */
    PyObject *strides = NULL; /* number of bytes to the next elt in each dim */
    PyObject *format = simple_format; /* struct module specifier: "B" */
    Py_ssize_t offset = 0;            /* buffer offset */
    int flags = ND_DEFAULT;           /* base buffer flags */

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO|OnOi", kwlist,
            &items, &shape, &strides, &offset, &format, &flags))
        return NULL;

    if (flags & ND_VAREXPORT) {
        PyErr_SetString(PyExc_ValueError,
            "ND_VAREXPORT flag can only be used during object creation");
        return NULL;
    }
    if (ND_IS_CONSUMER(nd)) {
        PyErr_SetString(PyExc_BufferError,
            "structure of re-exporting object is immutable");
        return NULL;
    }
    if (!(nd->flags&ND_VAREXPORT) && nd->head->exports > 0) {
        PyErr_Format(PyExc_BufferError,
            "cannot change structure: %zd exported buffer%s",
            nd->head->exports, nd->head->exports==1 ? "" : "s");
        return NULL;
    }

    if (ndarray_push_base(nd, items, shape, strides,
                          offset, format, flags) < 0)
        return NULL;

    Py_RETURN_NONE;
}

/* Pop a base from the linked list (if possible). */
static PyObject *
ndarray_pop(PyObject *self, PyObject *dummy)
{
    NDArrayObject *nd = (NDArrayObject *)self;
    if (ND_IS_CONSUMER(nd)) {
        PyErr_SetString(PyExc_BufferError,
            "structure of re-exporting object is immutable");
        return NULL;
    }
    if (nd->head->exports > 0) {
        PyErr_Format(PyExc_BufferError,
            "cannot change structure: %zd exported buffer%s",
            nd->head->exports, nd->head->exports==1 ? "" : "s");
        return NULL;
    }
    if (nd->head->next == NULL) {
        PyErr_SetString(PyExc_BufferError,
            "list only has a single base");
        return NULL;
    }

    ndbuf_pop(nd);
    Py_RETURN_NONE;
}


/**************************************************************************/
/*                               getbuffer                                */
/**************************************************************************/

static int
ndarray_getbuf(NDArrayObject *self, Py_buffer *view, int flags)
{
    ndbuf_t *ndbuf = self->head;
    Py_buffer *base = &ndbuf->base;
    int baseflags = ndbuf->flags;

    /* redirect mode */
    if (base->obj != NULL && (baseflags&ND_REDIRECT)) {
        return PyObject_GetBuffer(base->obj, view, flags);
    }

    /* start with complete information */
    *view = *base;
    view->obj = NULL;

    /* reconstruct format */
    if (view->format == NULL)
        view->format = "B";

    if (base->ndim != 0 &&
        ((REQ_SHAPE(flags) && base->shape == NULL) ||
         (REQ_STRIDES(flags) && base->strides == NULL))) {
        /* The ndarray is a re-exporter that has been created without full
           information for testing purposes. In this particular case the
           ndarray is not a PEP-3118 compliant buffer provider. */
        PyErr_SetString(PyExc_BufferError,
            "re-exporter does not provide format, shape or strides");
        return -1;
    }

    if (baseflags & ND_GETBUF_FAIL) {
        PyErr_SetString(PyExc_BufferError,
            "ND_GETBUF_FAIL: forced test exception");
        if (baseflags & ND_GETBUF_UNDEFINED)
            view->obj = (PyObject *)0x1; /* wrong but permitted in <= 3.2 */
        return -1;
    }

    if (REQ_WRITABLE(flags) && base->readonly) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not writable");
        return -1;
    }
    if (!REQ_FORMAT(flags)) {
        /* NULL indicates that the buffer's data type has been cast to 'B'.
           view->itemsize is the _previous_ itemsize. If shape is present,
           the equality product(shape) * itemsize = len still holds at this
           point. The equality calcsize(format) = itemsize does _not_ hold
           from here on! */
        view->format = NULL;
    }

    if (REQ_C_CONTIGUOUS(flags) && !ND_C_CONTIGUOUS(baseflags)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not C-contiguous");
        return -1;
    }
    if (REQ_F_CONTIGUOUS(flags) && !ND_FORTRAN_CONTIGUOUS(baseflags)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not Fortran contiguous");
        return -1;
    }
    if (REQ_ANY_CONTIGUOUS(flags) && !ND_ANY_CONTIGUOUS(baseflags)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray is not contiguous");
        return -1;
    }
    if (!REQ_INDIRECT(flags) && (baseflags & ND_PIL)) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray cannot be represented without suboffsets");
        return -1;
    }
    if (!REQ_STRIDES(flags)) {
        if (!ND_C_CONTIGUOUS(baseflags)) {
            PyErr_SetString(PyExc_BufferError,
                "ndarray is not C-contiguous");
            return -1;
        }
        view->strides = NULL;
    }
    if (!REQ_SHAPE(flags)) {
        /* PyBUF_SIMPLE or PyBUF_WRITABLE: at this point buf is C-contiguous,
           so base->buf = ndbuf->data. */
        if (view->format != NULL) {
            /* PyBUF_SIMPLE|PyBUF_FORMAT and PyBUF_WRITABLE|PyBUF_FORMAT do
               not make sense. */
            PyErr_Format(PyExc_BufferError,
                "ndarray: cannot cast to unsigned bytes if the format flag "
                "is present");
            return -1;
        }
        /* product(shape) * itemsize = len and calcsize(format) = itemsize
           do _not_ hold from here on! */
        view->ndim = 1;
        view->shape = NULL;
    }

    /* Ascertain that the new buffer has the same contiguity as the exporter */
    if (ND_C_CONTIGUOUS(baseflags) != PyBuffer_IsContiguous(view, 'C') ||
        /* skip cast to 1-d */
        (view->format != NULL && view->shape != NULL &&
         ND_FORTRAN_CONTIGUOUS(baseflags) != PyBuffer_IsContiguous(view, 'F')) ||
        /* cast to 1-d */
        (view->format == NULL && view->shape == NULL &&
         !PyBuffer_IsContiguous(view, 'F'))) {
        PyErr_SetString(PyExc_BufferError,
            "ndarray: contiguity mismatch in getbuf()");
        return -1;
    }

    view->obj = (PyObject *)self;
    Py_INCREF(view->obj);
    self->head->exports++;

    return 0;
}

static int
ndarray_releasebuf(NDArrayObject *self, Py_buffer *view)
{
    if (!ND_IS_CONSUMER(self)) {
        ndbuf_t *ndbuf = view->internal;
        if (--ndbuf->exports == 0 && ndbuf != self->head)
            ndbuf_delete(self, ndbuf);
    }

    return 0;
}

static PyBufferProcs ndarray_as_buffer = {
    (getbufferproc)ndarray_getbuf,        /* bf_getbuffer */
    (releasebufferproc)ndarray_releasebuf /* bf_releasebuffer */
};
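
/* A minimal consumer-side sketch (assumed usage, not part of this module):
   any C code can exercise the two slots above through the standard buffer
   API:

       Py_buffer view;
       if (PyObject_GetBuffer(nd, &view, PyBUF_FULL_RO) < 0)
           return NULL;             // ndarray_getbuf() rejected the request
       // ... use view.buf / view.shape / view.strides / view.suboffsets ...
       PyBuffer_Release(&view);     // eventually reaches ndarray_releasebuf()

   PyObject_GetBuffer() dispatches to bf_getbuffer and PyBuffer_Release()
   to bf_releasebuffer, which decrements ndbuf->exports. */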
/**************************************************************************/
/*                           indexing/slicing                             */
/**************************************************************************/

static char *
ptr_from_index(Py_buffer *base, Py_ssize_t index)
{
    char *ptr;
    Py_ssize_t nitems; /* items in the first dimension */

    if (base->shape)
        nitems = base->shape[0];
    else {
        assert(base->ndim == 1 && SIMPLE_FORMAT(base->format));
        nitems = base->len;
    }

    if (index < 0) {
        index += nitems;
    }
    if (index < 0 || index >= nitems) {
        PyErr_SetString(PyExc_IndexError, "index out of bounds");
        return NULL;
    }

    ptr = (char *)base->buf;

    if (base->strides == NULL)
        ptr += base->itemsize * index;
    else
        ptr += base->strides[0] * index;

    ptr = ADJUST_PTR(ptr, base->suboffsets);

    return ptr;
}

static PyObject *
ndarray_item(NDArrayObject *self, Py_ssize_t index)
{
    ndbuf_t *ndbuf = self->head;
    Py_buffer *base = &ndbuf->base;
    char *ptr;

    if (base->ndim == 0) {
        PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
        return NULL;
    }

    ptr = ptr_from_index(base, index);
    if (ptr == NULL)
        return NULL;

    if (base->ndim == 1) {
        return unpack_single(ptr, base->format, base->itemsize);
    }
    else {
        NDArrayObject *nd;
        Py_buffer *subview;

        nd = (NDArrayObject *)ndarray_new(&NDArray_Type, NULL, NULL);
        if (nd == NULL)
            return NULL;

        if (ndarray_init_staticbuf((PyObject *)self, nd, PyBUF_FULL_RO) < 0) {
            Py_DECREF(nd);
            return NULL;
        }

        subview = &nd->staticbuf.base;

        subview->buf = ptr;
        subview->len /= subview->shape[0];

        subview->ndim--;
        subview->shape++;
        if (subview->strides) subview->strides++;
        if (subview->suboffsets) subview->suboffsets++;

        init_flags(&nd->staticbuf);

        return (PyObject *)nd;
    }
}

/*
   For each dimension, we get valid (start, stop, step, slicelength) quadruples
   from PySlice_GetIndicesEx().

   Slicing NumPy arrays
   ====================

     A pointer to an element in a NumPy array is defined by:

       ptr = (char *)buf + indices[0] * strides[0] +
                           ... +
                           indices[ndim-1] * strides[ndim-1]

     Adjust buf:
     -----------
       Adding start[n] for each dimension effectively adds the constant:

         c = start[0] * strides[0] + ... + start[ndim-1] * strides[ndim-1]

       Therefore init_slice() adds all start[n] directly to buf.

     Adjust shape:
     -------------
       Obviously shape[n] = slicelength[n]

     Adjust strides:
     ---------------
       In the original array, the next element in a dimension is reached
       by adding strides[n] to the pointer. In the sliced array, elements
       may be skipped, so the next element is reached by adding:

         strides[n] * step[n]

   Slicing PIL arrays
   ==================

     Layout:
     -------
       In the first (zeroth) dimension, PIL arrays have an array of pointers
       to sub-arrays of ndim-1. Striding in the first dimension is done by
       getting the index of the nth pointer, dereferencing it and then adding
       a suboffset to it. The arrays pointed to can best be seen as regular
       NumPy arrays.

     Adjust buf:
     -----------
       In the original array, buf points to a location (usually the start)
       in the array of pointers. For the sliced array, start[0] can be
       added to buf in the same manner as for NumPy arrays.

     Adjust suboffsets:
     ------------------
       Due to the dereferencing step in the addressing scheme, it is not
       possible to adjust buf for higher dimensions. Recall that the
       sub-arrays pointed to are regular NumPy arrays, so for each of
       those arrays adding start[n] effectively adds the constant:

         c = start[1] * strides[1] + ... + start[ndim-1] * strides[ndim-1]

       This constant is added to suboffsets[0]. suboffsets[0] in turn is
       added to each pointer right after dereferencing.

     Adjust shape and strides:
     -------------------------
       Shape and strides are not influenced by the dereferencing step, so
       they are adjusted in the same manner as for NumPy arrays.

   Multiple levels of suboffsets
   =============================

     For a construct like an array of pointers to array of pointers to
     sub-arrays of ndim-2:

       suboffsets[0] = start[1] * strides[1]
       suboffsets[1] = start[2] * strides[2] + ...
*/
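
/* Worked example (hypothetical 1-d case): slicing a NumPy-style base with
   shape = {10}, strides = {1}, itemsize = 1 by start = 2, stop = 9, step = 3
   gives slicelength = 3, so init_slice() performs

     buf        += strides[0] * start   (= 2)
     shape[0]    = slicelength          (= 3)
     strides[0] *= step                 (= 3)

   i.e. the slice selects the elements at original offsets 2, 5 and 8. */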
  1428. static int
  1429. init_slice(Py_buffer *base, PyObject *key, int dim)
  1430. {
  1431. Py_ssize_t start, stop, step, slicelength;
  1432. if (PySlice_Unpack(key, &start, &stop, &step) < 0) {
  1433. return -1;
  1434. }
  1435. slicelength = PySlice_AdjustIndices(base->shape[dim], &start, &stop, step);
  1436. if (base->suboffsets == NULL || dim == 0) {
  1437. adjust_buf:
  1438. base->buf = (char *)base->buf + base->strides[dim] * start;
  1439. }
  1440. else {
  1441. Py_ssize_t n = dim-1;
  1442. while (n >= 0 && base->suboffsets[n] < 0)
  1443. n--;
  1444. if (n < 0)
  1445. goto adjust_buf; /* all suboffsets are negative */
  1446. base->suboffsets[n] = base->suboffsets[n] + base->strides[dim] * start;
  1447. }
  1448. base->shape[dim] = slicelength;
  1449. base->strides[dim] = base->strides[dim] * step;
  1450. return 0;
  1451. }
  1452. static int
  1453. copy_structure(Py_buffer *base)
  1454. {
  1455. Py_ssize_t *shape = NULL, *strides = NULL, *suboffsets = NULL;
  1456. Py_ssize_t i;
  1457. shape = PyMem_Malloc(base->ndim * (sizeof *shape));
  1458. strides = PyMem_Malloc(base->ndim * (sizeof *strides));
  1459. if (shape == NULL || strides == NULL)
  1460. goto err_nomem;
  1461. suboffsets = NULL;
  1462. if (base->suboffsets) {
  1463. suboffsets = PyMem_Malloc(base->ndim * (sizeof *suboffsets));
  1464. if (suboffsets == NULL)
  1465. goto err_nomem;
  1466. }
  1467. for (i = 0; i < base->ndim; i++) {
  1468. shape[i] = base->shape[i];
  1469. strides[i] = base->strides[i];
  1470. if (suboffsets)
  1471. suboffsets[i] = base->suboffsets[i];
  1472. }
  1473. base->shape = shape;
  1474. base->strides = strides;
  1475. base->suboffsets = suboffsets;
  1476. return 0;
  1477. err_nomem:
  1478. PyErr_NoMemory();
  1479. PyMem_XFree(shape);
  1480. PyMem_XFree(strides);
  1481. PyMem_XFree(suboffsets);
  1482. return -1;
  1483. }
  1484. static PyObject *
  1485. ndarray_subscript(NDArrayObject *self, PyObject *key)
  1486. {
  1487. NDArrayObject *nd;
  1488. ndbuf_t *ndbuf;
  1489. Py_buffer *base = &self->head->base;
  1490. if (base->ndim == 0) {
  1491. if (PyTuple_Check(key) && PyTuple_GET_SIZE(key) == 0) {
  1492. return unpack_single(base->buf, base->format, base->itemsize);
  1493. }
  1494. else if (key == Py_Ellipsis) {
  1495. Py_INCREF(self);
  1496. return (PyObject *)self;
  1497. }
  1498. else {
  1499. PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
  1500. return NULL;
  1501. }
  1502. }
  1503. if (PyIndex_Check(key)) {
  1504. Py_ssize_t index = PyLong_AsSsize_t(key);
  1505. if (index == -1 && PyErr_Occurred())
  1506. return NULL;
  1507. return ndarray_item(self, index);
  1508. }
  1509. nd = (NDArrayObject *)ndarray_new(&NDArray_Type, NULL, NULL);
  1510. if (nd == NULL)
  1511. return NULL;
  1512. /* new ndarray is a consumer */
  1513. if (ndarray_init_staticbuf((PyObject *)self, nd, PyBUF_FULL_RO) < 0) {
  1514. Py_DECREF(nd);
  1515. return NULL;
  1516. }
  1517. /* copy shape, strides and suboffsets */
  1518. ndbuf = nd->head;
  1519. base = &ndbuf->base;
  1520. if (copy_structure(base) < 0) {
  1521. Py_DECREF(nd);
  1522. return NULL;
  1523. }
  1524. ndbuf->flags |= ND_OWN_ARRAYS;
  1525. if (PySlice_Check(key)) {
  1526. /* one-dimensional slice */
  1527. if (init_slice(base, key, 0) < 0)
  1528. goto err_occurred;
  1529. }
  1530. else if (PyTuple_Check(key)) {
  1531. /* multi-dimensional slice */
  1532. PyObject *tuple = key;
  1533. Py_ssize_t i, n;
  1534. n = PyTuple_GET_SIZE(tuple);
  1535. for (i = 0; i < n; i++) {
  1536. key = PyTuple_GET_ITEM(tuple, i);
  1537. if (!PySlice_Check(key))
  1538. goto type_error;
  1539. if (init_slice(base, key, (int)i) < 0)
  1540. goto err_occurred;
  1541. }
  1542. }
  1543. else {
  1544. goto type_error;
  1545. }
  1546. init_len(base);
  1547. init_flags(ndbuf);
  1548. return (PyObject *)nd;
  1549. type_error:
  1550. PyErr_Format(PyExc_TypeError,
  1551. "cannot index memory using \"%.200s\"",
  1552. key->ob_type->tp_name);
  1553. err_occurred:
  1554. Py_DECREF(nd);
  1555. return NULL;
  1556. }
  1557. static int
  1558. ndarray_ass_subscript(NDArrayObject *self, PyObject *key, PyObject *value)
  1559. {
  1560. NDArrayObject *nd;
  1561. Py_buffer *dest = &self->head->base;
  1562. Py_buffer src;
  1563. char *ptr;
  1564. Py_ssize_t index;
  1565. int ret = -1;
  1566. if (dest->readonly) {
  1567. PyErr_SetString(PyExc_TypeError, "ndarray is not writable");
  1568. return -1;
  1569. }
  1570. if (value == NULL) {
  1571. PyErr_SetString(PyExc_TypeError, "ndarray data cannot be deleted");
  1572. return -1;
  1573. }
  1574. if (dest->ndim == 0) {
  1575. if (key == Py_Ellipsis ||
  1576. (PyTuple_Check(key) && PyTuple_GET_SIZE(key) == 0)) {
  1577. ptr = (char *)dest->buf;
  1578. return pack_single(ptr, value, dest->format, dest->itemsize);
  1579. }
  1580. else {
  1581. PyErr_SetString(PyExc_TypeError, "invalid indexing of scalar");
  1582. return -1;
  1583. }
  1584. }
  1585. if (dest->ndim == 1 && PyIndex_Check(key)) {
  1586. /* rvalue must be a single item */
  1587. index = PyLong_AsSsize_t(key);
  1588. if (index == -1 && PyErr_Occurred())
  1589. return -1;
  1590. else {
  1591. ptr = ptr_from_index(dest, index);
  1592. if (ptr == NULL)
  1593. return -1;
  1594. }
  1595. return pack_single(ptr, value, dest->format, dest->itemsize);
  1596. }
  1597. /* rvalue must be an exporter */
  1598. if (PyObject_GetBuffer(value, &src, PyBUF_FULL_RO) == -1)
  1599. return -1;
  1600. nd = (NDArrayObject *)ndarray_subscript(self, key);
  1601. if (nd != NULL) {
  1602. dest = &nd->head->base;
  1603. ret = copy_buffer(dest, &src);
  1604. Py_DECREF(nd);
  1605. }
  1606. PyBuffer_Release(&src);
  1607. return ret;
  1608. }
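/* Illustrative sketch (not used by this module): the consumer pattern used
   in the exporter branch above -- request a full read-only buffer from an
   object, use it, and release it exactly once.  The function name is
   hypothetical. */
#if 0
static Py_ssize_t
exported_nbytes(PyObject *exporter)
{
    Py_buffer view;
    Py_ssize_t len;
    if (PyObject_GetBuffer(exporter, &view, PyBUF_FULL_RO) < 0)
        return -1;              /* the exporter has set an exception */
    len = view.len;             /* total number of bytes in the buffer */
    PyBuffer_Release(&view);    /* matches the successful GetBuffer call */
    return len;
}
#endif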
  1609. static PyObject *
  1610. slice_indices(PyObject *self, PyObject *args)
  1611. {
  1612. PyObject *ret, *key, *tmp;
  1613. Py_ssize_t s[4]; /* start, stop, step, slicelength */
  1614. Py_ssize_t i, len;
  1615. if (!PyArg_ParseTuple(args, "On", &key, &len)) {
  1616. return NULL;
  1617. }
  1618. if (!PySlice_Check(key)) {
  1619. PyErr_SetString(PyExc_TypeError,
  1620. "first argument must be a slice object");
  1621. return NULL;
  1622. }
  1623. if (PySlice_Unpack(key, &s[0], &s[1], &s[2]) < 0) {
  1624. return NULL;
  1625. }
  1626. s[3] = PySlice_AdjustIndices(len, &s[0], &s[1], s[2]);
  1627. ret = PyTuple_New(4);
  1628. if (ret == NULL)
  1629. return NULL;
  1630. for (i = 0; i < 4; i++) {
  1631. tmp = PyLong_FromSsize_t(s[i]);
  1632. if (tmp == NULL)
  1633. goto error;
  1634. PyTuple_SET_ITEM(ret, i, tmp);
  1635. }
  1636. return ret;
  1637. error:
  1638. Py_DECREF(ret);
  1639. return NULL;
  1640. }
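/* Illustrative sketch (not used by this module): how the four values
   returned by slice_indices() relate.  PySlice_AdjustIndices() clips start
   and stop to the given length and returns the slice length, so every
   index generated below lies in [0, len).  The helper is hypothetical. */
#if 0
static Py_ssize_t
count_slice_indices(PyObject *slice, Py_ssize_t len)
{
    Py_ssize_t start, stop, step, slicelength, i;
    if (PySlice_Unpack(slice, &start, &stop, &step) < 0)
        return -1;
    slicelength = PySlice_AdjustIndices(len, &start, &stop, step);
    for (i = 0; i < slicelength; i++) {
        Py_ssize_t index = start + i * step;
        assert(0 <= index && index < len);
        (void)index;
    }
    return slicelength;     /* number of indices actually visited */
}
#endif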
  1641. static PyMappingMethods ndarray_as_mapping = {
  1642. NULL, /* mp_length */
  1643. (binaryfunc)ndarray_subscript, /* mp_subscript */
  1644. (objobjargproc)ndarray_ass_subscript /* mp_ass_subscript */
  1645. };
  1646. static PySequenceMethods ndarray_as_sequence = {
  1647. 0, /* sq_length */
  1648. 0, /* sq_concat */
  1649. 0, /* sq_repeat */
  1650. (ssizeargfunc)ndarray_item, /* sq_item */
  1651. };
  1652. /**************************************************************************/
  1653. /* getters */
  1654. /**************************************************************************/
  1655. static PyObject *
  1656. ssize_array_as_tuple(Py_ssize_t *array, Py_ssize_t len)
  1657. {
  1658. PyObject *tuple, *x;
  1659. Py_ssize_t i;
  1660. if (array == NULL)
  1661. return PyTuple_New(0);
  1662. tuple = PyTuple_New(len);
  1663. if (tuple == NULL)
  1664. return NULL;
  1665. for (i = 0; i < len; i++) {
  1666. x = PyLong_FromSsize_t(array[i]);
  1667. if (x == NULL) {
  1668. Py_DECREF(tuple);
  1669. return NULL;
  1670. }
  1671. PyTuple_SET_ITEM(tuple, i, x);
  1672. }
  1673. return tuple;
  1674. }
  1675. static PyObject *
  1676. ndarray_get_flags(NDArrayObject *self, void *closure)
  1677. {
  1678. return PyLong_FromLong(self->head->flags);
  1679. }
  1680. static PyObject *
  1681. ndarray_get_offset(NDArrayObject *self, void *closure)
  1682. {
  1683. ndbuf_t *ndbuf = self->head;
  1684. return PyLong_FromSsize_t(ndbuf->offset);
  1685. }
  1686. static PyObject *
  1687. ndarray_get_obj(NDArrayObject *self, void *closure)
  1688. {
  1689. Py_buffer *base = &self->head->base;
  1690. if (base->obj == NULL) {
  1691. Py_RETURN_NONE;
  1692. }
  1693. Py_INCREF(base->obj);
  1694. return base->obj;
  1695. }
  1696. static PyObject *
  1697. ndarray_get_nbytes(NDArrayObject *self, void *closure)
  1698. {
  1699. Py_buffer *base = &self->head->base;
  1700. return PyLong_FromSsize_t(base->len);
  1701. }
  1702. static PyObject *
  1703. ndarray_get_readonly(NDArrayObject *self, void *closure)
  1704. {
  1705. Py_buffer *base = &self->head->base;
  1706. return PyLong_FromLong(base->readonly);
  1707. }
  1708. static PyObject *
  1709. ndarray_get_itemsize(NDArrayObject *self, void *closure)
  1710. {
  1711. Py_buffer *base = &self->head->base;
  1712. return PyLong_FromSsize_t(base->itemsize);
  1713. }
  1714. static PyObject *
  1715. ndarray_get_format(NDArrayObject *self, void *closure)
  1716. {
  1717. Py_buffer *base = &self->head->base;
  1718. char *fmt = base->format ? base->format : "";
  1719. return PyUnicode_FromString(fmt);
  1720. }
  1721. static PyObject *
  1722. ndarray_get_ndim(NDArrayObject *self, void *closure)
  1723. {
  1724. Py_buffer *base = &self->head->base;
  1725. return PyLong_FromSsize_t(base->ndim);
  1726. }
  1727. static PyObject *
  1728. ndarray_get_shape(NDArrayObject *self, void *closure)
  1729. {
  1730. Py_buffer *base = &self->head->base;
  1731. return ssize_array_as_tuple(base->shape, base->ndim);
  1732. }
  1733. static PyObject *
  1734. ndarray_get_strides(NDArrayObject *self, void *closure)
  1735. {
  1736. Py_buffer *base = &self->head->base;
  1737. return ssize_array_as_tuple(base->strides, base->ndim);
  1738. }
  1739. static PyObject *
  1740. ndarray_get_suboffsets(NDArrayObject *self, void *closure)
  1741. {
  1742. Py_buffer *base = &self->head->base;
  1743. return ssize_array_as_tuple(base->suboffsets, base->ndim);
  1744. }
  1745. static PyObject *
  1746. ndarray_c_contig(PyObject *self, PyObject *dummy)
  1747. {
  1748. NDArrayObject *nd = (NDArrayObject *)self;
  1749. int ret = PyBuffer_IsContiguous(&nd->head->base, 'C');
  1750. if (ret != ND_C_CONTIGUOUS(nd->head->flags)) {
  1751. PyErr_SetString(PyExc_RuntimeError,
  1752. "results from PyBuffer_IsContiguous() and flags differ");
  1753. return NULL;
  1754. }
  1755. return PyBool_FromLong(ret);
  1756. }
  1757. static PyObject *
  1758. ndarray_fortran_contig(PyObject *self, PyObject *dummy)
  1759. {
  1760. NDArrayObject *nd = (NDArrayObject *)self;
  1761. int ret = PyBuffer_IsContiguous(&nd->head->base, 'F');
  1762. if (ret != ND_FORTRAN_CONTIGUOUS(nd->head->flags)) {
  1763. PyErr_SetString(PyExc_RuntimeError,
  1764. "results from PyBuffer_IsContiguous() and flags differ");
  1765. return NULL;
  1766. }
  1767. return PyBool_FromLong(ret);
  1768. }
  1769. static PyObject *
  1770. ndarray_contig(PyObject *self, PyObject *dummy)
  1771. {
  1772. NDArrayObject *nd = (NDArrayObject *)self;
  1773. int ret = PyBuffer_IsContiguous(&nd->head->base, 'A');
  1774. if (ret != ND_ANY_CONTIGUOUS(nd->head->flags)) {
  1775. PyErr_SetString(PyExc_RuntimeError,
  1776. "results from PyBuffer_IsContiguous() and flags differ");
  1777. return NULL;
  1778. }
  1779. return PyBool_FromLong(ret);
  1780. }
  1781. static PyGetSetDef ndarray_getset [] =
  1782. {
  1783. /* ndbuf */
  1784. { "flags", (getter)ndarray_get_flags, NULL, NULL, NULL},
  1785. { "offset", (getter)ndarray_get_offset, NULL, NULL, NULL},
  1786. /* ndbuf.base */
  1787. { "obj", (getter)ndarray_get_obj, NULL, NULL, NULL},
  1788. { "nbytes", (getter)ndarray_get_nbytes, NULL, NULL, NULL},
  1789. { "readonly", (getter)ndarray_get_readonly, NULL, NULL, NULL},
  1790. { "itemsize", (getter)ndarray_get_itemsize, NULL, NULL, NULL},
  1791. { "format", (getter)ndarray_get_format, NULL, NULL, NULL},
  1792. { "ndim", (getter)ndarray_get_ndim, NULL, NULL, NULL},
  1793. { "shape", (getter)ndarray_get_shape, NULL, NULL, NULL},
  1794. { "strides", (getter)ndarray_get_strides, NULL, NULL, NULL},
  1795. { "suboffsets", (getter)ndarray_get_suboffsets, NULL, NULL, NULL},
  1796. { "c_contiguous", (getter)ndarray_c_contig, NULL, NULL, NULL},
  1797. { "f_contiguous", (getter)ndarray_fortran_contig, NULL, NULL, NULL},
  1798. { "contiguous", (getter)ndarray_contig, NULL, NULL, NULL},
  1799. {NULL}
  1800. };
  1801. static PyObject *
  1802. ndarray_tolist(PyObject *self, PyObject *dummy)
  1803. {
  1804. return ndarray_as_list((NDArrayObject *)self);
  1805. }
  1806. static PyObject *
  1807. ndarray_tobytes(PyObject *self, PyObject *dummy)
  1808. {
  1809. ndbuf_t *ndbuf = ((NDArrayObject *)self)->head;
  1810. Py_buffer *src = &ndbuf->base;
  1811. Py_buffer dest;
  1812. PyObject *ret = NULL;
  1813. char *mem;
  1814. if (ND_C_CONTIGUOUS(ndbuf->flags))
  1815. return PyBytes_FromStringAndSize(src->buf, src->len);
  1816. assert(src->shape != NULL);
  1817. assert(src->strides != NULL);
  1818. assert(src->ndim > 0);
  1819. mem = PyMem_Malloc(src->len);
  1820. if (mem == NULL) {
  1821. PyErr_NoMemory();
  1822. return NULL;
  1823. }
  1824. dest = *src;
  1825. dest.buf = mem;
  1826. dest.suboffsets = NULL;
  1827. dest.strides = strides_from_shape(ndbuf, 0);
  1828. if (dest.strides == NULL)
  1829. goto out;
  1830. if (copy_buffer(&dest, src) < 0)
  1831. goto out;
  1832. ret = PyBytes_FromStringAndSize(mem, src->len);
  1833. out:
  1834. PyMem_XFree(dest.strides);
  1835. PyMem_Free(mem);
  1836. return ret;
  1837. }
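/* Illustrative sketch (not used by this module): the same result as
   ndarray_tobytes() can be obtained with PyBuffer_ToContiguous(), which
   performs the strided copy into a temporary C-contiguous block itself.
   The function name is hypothetical. */
#if 0
static PyObject *
view_tobytes_c(Py_buffer *src)
{
    PyObject *ret = NULL;
    char *mem = PyMem_Malloc(src->len);
    if (mem == NULL)
        return PyErr_NoMemory();
    /* 'C' requests a C-contiguous (row-major) copy of the source view. */
    if (PyBuffer_ToContiguous(mem, src, src->len, 'C') == 0)
        ret = PyBytes_FromStringAndSize(mem, src->len);
    PyMem_Free(mem);
    return ret;
}
#endif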
1838. /* Add redundant suboffsets (all -1, i.e. no pointer dereferencing) for testing. */
  1839. static PyObject *
  1840. ndarray_add_suboffsets(PyObject *self, PyObject *dummy)
  1841. {
  1842. NDArrayObject *nd = (NDArrayObject *)self;
  1843. Py_buffer *base = &nd->head->base;
  1844. Py_ssize_t i;
  1845. if (base->suboffsets != NULL) {
  1846. PyErr_SetString(PyExc_TypeError,
  1847. "cannot add suboffsets to PIL-style array");
  1848. return NULL;
  1849. }
  1850. if (base->strides == NULL) {
  1851. PyErr_SetString(PyExc_TypeError,
  1852. "cannot add suboffsets to array without strides");
  1853. return NULL;
  1854. }
  1855. base->suboffsets = PyMem_Malloc(base->ndim * (sizeof *base->suboffsets));
  1856. if (base->suboffsets == NULL) {
  1857. PyErr_NoMemory();
  1858. return NULL;
  1859. }
  1860. for (i = 0; i < base->ndim; i++)
  1861. base->suboffsets[i] = -1;
  1862. nd->head->flags &= ~(ND_C|ND_FORTRAN);
  1863. Py_RETURN_NONE;
  1864. }
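/* Illustrative sketch (not used by this module): how a consumer follows a
   suboffset.  A non-negative suboffsets[dim] means that after adding the
   stride the resulting location holds a pointer which must be dereferenced
   and then offset by suboffsets[dim]; -1, as added above, means no
   dereferencing takes place in that dimension.  The helper is hypothetical. */
#if 0
static char *
step_dimension(char *ptr, const Py_buffer *view, int dim, Py_ssize_t index)
{
    ptr += view->strides[dim] * index;
    if (view->suboffsets != NULL && view->suboffsets[dim] >= 0)
        ptr = *(char **)ptr + view->suboffsets[dim];
    return ptr;
}
#endif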
  1865. /* Test PyMemoryView_FromBuffer(): return a memoryview from a static buffer.
  1866. Obviously this is fragile and only one such view may be active at any
  1867. time. Never use anything like this in real code! */
  1868. static char *infobuf = NULL;
  1869. static PyObject *
  1870. ndarray_memoryview_from_buffer(PyObject *self, PyObject *dummy)
  1871. {
  1872. const NDArrayObject *nd = (NDArrayObject *)self;
  1873. const Py_buffer *view = &nd->head->base;
  1874. const ndbuf_t *ndbuf;
  1875. static char format[ND_MAX_NDIM+1];
  1876. static Py_ssize_t shape[ND_MAX_NDIM];
  1877. static Py_ssize_t strides[ND_MAX_NDIM];
  1878. static Py_ssize_t suboffsets[ND_MAX_NDIM];
  1879. static Py_buffer info;
  1880. char *p;
  1881. if (!ND_IS_CONSUMER(nd))
  1882. ndbuf = nd->head; /* self is ndarray/original exporter */
  1883. else if (NDArray_Check(view->obj) && !ND_IS_CONSUMER(view->obj))
  1884. /* self is ndarray and consumer from ndarray/original exporter */
  1885. ndbuf = ((NDArrayObject *)view->obj)->head;
  1886. else {
  1887. PyErr_SetString(PyExc_TypeError,
  1888. "memoryview_from_buffer(): ndarray must be original exporter or "
  1889. "consumer from ndarray/original exporter");
  1890. return NULL;
  1891. }
  1892. info = *view;
  1893. p = PyMem_Realloc(infobuf, ndbuf->len);
  1894. if (p == NULL) {
  1895. PyMem_Free(infobuf);
  1896. PyErr_NoMemory();
  1897. infobuf = NULL;
  1898. return NULL;
  1899. }
  1900. else {
  1901. infobuf = p;
  1902. }
  1903. /* copy the complete raw data */
  1904. memcpy(infobuf, ndbuf->data, ndbuf->len);
  1905. info.buf = infobuf + ((char *)view->buf - ndbuf->data);
  1906. if (view->format) {
  1907. if (strlen(view->format) > ND_MAX_NDIM) {
  1908. PyErr_Format(PyExc_TypeError,
  1909. "memoryview_from_buffer: format is limited to %d characters",
  1910. ND_MAX_NDIM);
  1911. return NULL;
  1912. }
  1913. strcpy(format, view->format);
  1914. info.format = format;
  1915. }
  1916. if (view->ndim > ND_MAX_NDIM) {
  1917. PyErr_Format(PyExc_TypeError,
  1918. "memoryview_from_buffer: ndim is limited to %d", ND_MAX_NDIM);
  1919. return NULL;
  1920. }
  1921. if (view->shape) {
  1922. memcpy(shape, view->shape, view->ndim * sizeof(Py_ssize_t));
  1923. info.shape = shape;
  1924. }
  1925. if (view->strides) {
  1926. memcpy(strides, view->strides, view->ndim * sizeof(Py_ssize_t));
  1927. info.strides = strides;
  1928. }
  1929. if (view->suboffsets) {
  1930. memcpy(suboffsets, view->suboffsets, view->ndim * sizeof(Py_ssize_t));
  1931. info.suboffsets = suboffsets;
  1932. }
  1933. return PyMemoryView_FromBuffer(&info);
  1934. }
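/* Illustrative sketch (not used by this module): the usual, much simpler
   way to hand a raw C buffer to Python is PyBuffer_FillInfo() followed by
   PyMemoryView_FromBuffer().  The raw array is static so that the exported
   memory outlives the call; the function name is hypothetical. */
#if 0
static PyObject *
memoryview_from_raw_bytes(void)
{
    static char raw[8] = {0, 1, 2, 3, 4, 5, 6, 7};
    Py_buffer info;
    /* One-dimensional, read-only view of unsigned bytes, no re-exporter. */
    if (PyBuffer_FillInfo(&info, NULL, raw, sizeof(raw), 1,
                          PyBUF_CONTIG_RO) < 0)
        return NULL;
    return PyMemoryView_FromBuffer(&info);
}
#endif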
  1935. /* Get a single item from bufobj at the location specified by seq.
  1936. seq is a list or tuple of indices. The purpose of this function
  1937. is to check other functions against PyBuffer_GetPointer(). */
  1938. static PyObject *
  1939. get_pointer(PyObject *self, PyObject *args)
  1940. {
  1941. PyObject *ret = NULL, *bufobj, *seq;
  1942. Py_buffer view;
  1943. Py_ssize_t indices[ND_MAX_NDIM];
  1944. Py_ssize_t i;
  1945. void *ptr;
  1946. if (!PyArg_ParseTuple(args, "OO", &bufobj, &seq)) {
  1947. return NULL;
  1948. }
  1949. CHECK_LIST_OR_TUPLE(seq);
  1950. if (PyObject_GetBuffer(bufobj, &view, PyBUF_FULL_RO) < 0)
  1951. return NULL;
  1952. if (view.ndim > ND_MAX_NDIM) {
  1953. PyErr_Format(PyExc_ValueError,
  1954. "get_pointer(): ndim > %d", ND_MAX_NDIM);
  1955. goto out;
  1956. }
  1957. if (PySequence_Fast_GET_SIZE(seq) != view.ndim) {
  1958. PyErr_SetString(PyExc_ValueError,
  1959. "get_pointer(): len(indices) != ndim");
  1960. goto out;
  1961. }
  1962. for (i = 0; i < view.ndim; i++) {
  1963. PyObject *x = PySequence_Fast_GET_ITEM(seq, i);
  1964. indices[i] = PyLong_AsSsize_t(x);
  1965. if (PyErr_Occurred())
  1966. goto out;
  1967. if (indices[i] < 0 || indices[i] >= view.shape[i]) {
  1968. PyErr_Format(PyExc_ValueError,
  1969. "get_pointer(): invalid index %zd at position %zd",
  1970. indices[i], i);
  1971. goto out;
  1972. }
  1973. }
  1974. ptr = PyBuffer_GetPointer(&view, indices);
  1975. ret = unpack_single(ptr, view.format, view.itemsize);
  1976. out:
  1977. PyBuffer_Release(&view);
  1978. return ret;
  1979. }
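/* Illustrative sketch (not used by this module): what PyBuffer_GetPointer()
   computes for a full, possibly PIL-style view -- for each dimension add
   index*stride and follow the suboffset if there is one.  The helper name
   is hypothetical. */
#if 0
static void *
pointer_from_indices(const Py_buffer *view, const Py_ssize_t *indices)
{
    char *ptr = (char *)view->buf;
    Py_ssize_t dim;
    for (dim = 0; dim < view->ndim; dim++) {
        ptr += view->strides[dim] * indices[dim];
        if (view->suboffsets != NULL && view->suboffsets[dim] >= 0)
            ptr = *(char **)ptr + view->suboffsets[dim];
    }
    return ptr;
}
#endif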
  1980. static PyObject *
  1981. get_sizeof_void_p(PyObject *self)
  1982. {
  1983. return PyLong_FromSize_t(sizeof(void *));
  1984. }
  1985. static char
  1986. get_ascii_order(PyObject *order)
  1987. {
  1988. PyObject *ascii_order;
  1989. char ord;
  1990. if (!PyUnicode_Check(order)) {
  1991. PyErr_SetString(PyExc_TypeError,
  1992. "order must be a string");
  1993. return CHAR_MAX;
  1994. }
  1995. ascii_order = PyUnicode_AsASCIIString(order);
  1996. if (ascii_order == NULL) {
  1997. return CHAR_MAX;
  1998. }
  1999. ord = PyBytes_AS_STRING(ascii_order)[0];
  2000. Py_DECREF(ascii_order);
  2001. if (ord != 'C' && ord != 'F' && ord != 'A') {
  2002. PyErr_SetString(PyExc_ValueError,
  2003. "invalid order, must be C, F or A");
  2004. return CHAR_MAX;
  2005. }
  2006. return ord;
  2007. }
2008. /* Test PyMemoryView_GetContiguous(): return a contiguous memoryview. */
  2009. static PyObject *
  2010. get_contiguous(PyObject *self, PyObject *args)
  2011. {
  2012. PyObject *obj;
  2013. PyObject *buffertype;
  2014. PyObject *order;
  2015. long type;
  2016. char ord;
  2017. if (!PyArg_ParseTuple(args, "OOO", &obj, &buffertype, &order)) {
  2018. return NULL;
  2019. }
  2020. if (!PyLong_Check(buffertype)) {
  2021. PyErr_SetString(PyExc_TypeError,
  2022. "buffertype must be PyBUF_READ or PyBUF_WRITE");
  2023. return NULL;
  2024. }
  2025. type = PyLong_AsLong(buffertype);
  2026. if (type == -1 && PyErr_Occurred()) {
  2027. return NULL;
  2028. }
  2029. if (type != PyBUF_READ && type != PyBUF_WRITE) {
  2030. PyErr_SetString(PyExc_ValueError,
  2031. "invalid buffer type");
  2032. return NULL;
  2033. }
  2034. ord = get_ascii_order(order);
  2035. if (ord == CHAR_MAX)
  2036. return NULL;
  2037. return PyMemoryView_GetContiguous(obj, (int)type, ord);
  2038. }
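/* Illustrative sketch (not used by this module): a typical C-level use of
   PyMemoryView_GetContiguous().  With PyBUF_READ a contiguous exporter is
   wrapped without a copy, while a non-contiguous one is copied into a new
   contiguous buffer; a PyBUF_WRITE request on a non-contiguous exporter
   fails.  The function name is hypothetical. */
#if 0
static PyObject *
tobytes_via_getcontiguous(PyObject *obj)
{
    PyObject *mv = PyMemoryView_GetContiguous(obj, PyBUF_READ, 'C');
    PyObject *bytes;
    if (mv == NULL)
        return NULL;
    bytes = PyObject_Bytes(mv);   /* materialize the contiguous data */
    Py_DECREF(mv);
    return bytes;
}
#endif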
2039. /* Test PyBuffer_ToContiguous(): copy a (possibly non-contiguous) buffer into a contiguous bytes object. */
  2040. static PyObject *
  2041. py_buffer_to_contiguous(PyObject *self, PyObject *args)
  2042. {
  2043. PyObject *obj;
  2044. PyObject *order;
  2045. PyObject *ret = NULL;
  2046. int flags;
  2047. char ord;
  2048. Py_buffer view;
  2049. char *buf = NULL;
  2050. if (!PyArg_ParseTuple(args, "OOi", &obj, &order, &flags)) {
  2051. return NULL;
  2052. }
  2053. if (PyObject_GetBuffer(obj, &view, flags) < 0) {
  2054. return NULL;
  2055. }
  2056. ord = get_ascii_order(order);
  2057. if (ord == CHAR_MAX) {
  2058. goto out;
  2059. }
  2060. buf = PyMem_Malloc(view.len);
  2061. if (buf == NULL) {
  2062. PyErr_NoMemory();
  2063. goto out;
  2064. }
  2065. if (PyBuffer_ToContiguous(buf, &view, view.len, ord) < 0) {
  2066. goto out;
  2067. }
  2068. ret = PyBytes_FromStringAndSize(buf, view.len);
  2069. out:
  2070. PyBuffer_Release(&view);
  2071. PyMem_XFree(buf);
  2072. return ret;
  2073. }
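/* Illustrative sketch (not used by this module): the inverse direction of
   py_buffer_to_contiguous() -- PyBuffer_FromContiguous() scatters a
   contiguous block of view->len bytes back into a (possibly non-contiguous)
   writable view using 'C' or 'F' ordering.  The name is hypothetical. */
#if 0
static int
roundtrip_contiguous(Py_buffer *view, char ord)
{
    char *tmp = PyMem_Malloc(view->len);
    int ret = -1;
    if (tmp == NULL) {
        PyErr_NoMemory();
        return -1;
    }
    if (PyBuffer_ToContiguous(tmp, view, view->len, ord) == 0)
        ret = PyBuffer_FromContiguous(view, tmp, view->len, ord);
    PyMem_Free(tmp);
    return ret;
}
#endif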
  2074. static int
  2075. fmtcmp(const char *fmt1, const char *fmt2)
  2076. {
  2077. if (fmt1 == NULL) {
  2078. return fmt2 == NULL || strcmp(fmt2, "B") == 0;
  2079. }
  2080. if (fmt2 == NULL) {
  2081. return fmt1 == NULL || strcmp(fmt1, "B") == 0;
  2082. }
  2083. return strcmp(fmt1, fmt2) == 0;
  2084. }
  2085. static int
  2086. arraycmp(const Py_ssize_t *a1, const Py_ssize_t *a2, const Py_ssize_t *shape,
  2087. Py_ssize_t ndim)
  2088. {
  2089. Py_ssize_t i;
  2090. for (i = 0; i < ndim; i++) {
  2091. if (shape && shape[i] <= 1) {
2092. /* strides may differ in dimensions that have fewer than two elements */
  2093. continue;
  2094. }
  2095. if (a1[i] != a2[i]) {
  2096. return 0;
  2097. }
  2098. }
  2099. return 1;
  2100. }
  2101. /* Compare two contiguous buffers for physical equality. */
  2102. static PyObject *
  2103. cmp_contig(PyObject *self, PyObject *args)
  2104. {
  2105. PyObject *b1, *b2; /* buffer objects */
  2106. Py_buffer v1, v2;
  2107. PyObject *ret;
  2108. int equal = 0;
  2109. if (!PyArg_ParseTuple(args, "OO", &b1, &b2)) {
  2110. return NULL;
  2111. }
  2112. if (PyObject_GetBuffer(b1, &v1, PyBUF_FULL_RO) < 0) {
  2113. PyErr_SetString(PyExc_TypeError,
  2114. "cmp_contig: first argument does not implement the buffer "
  2115. "protocol");
  2116. return NULL;
  2117. }
  2118. if (PyObject_GetBuffer(b2, &v2, PyBUF_FULL_RO) < 0) {
  2119. PyErr_SetString(PyExc_TypeError,
  2120. "cmp_contig: second argument does not implement the buffer "
  2121. "protocol");
  2122. PyBuffer_Release(&v1);
  2123. return NULL;
  2124. }
  2125. if (!(PyBuffer_IsContiguous(&v1, 'C')&&PyBuffer_IsContiguous(&v2, 'C')) &&
  2126. !(PyBuffer_IsContiguous(&v1, 'F')&&PyBuffer_IsContiguous(&v2, 'F'))) {
  2127. goto result;
  2128. }
2129. /* readonly is deliberately not compared: it may differ if a buffer was created from a non-contiguous source */
  2130. if (v1.len != v2.len ||
  2131. v1.itemsize != v2.itemsize ||
  2132. v1.ndim != v2.ndim ||
  2133. !fmtcmp(v1.format, v2.format) ||
  2134. !!v1.shape != !!v2.shape ||
  2135. !!v1.strides != !!v2.strides ||
  2136. !!v1.suboffsets != !!v2.suboffsets) {
  2137. goto result;
  2138. }
  2139. if ((v1.shape && !arraycmp(v1.shape, v2.shape, NULL, v1.ndim)) ||
  2140. (v1.strides && !arraycmp(v1.strides, v2.strides, v1.shape, v1.ndim)) ||
  2141. (v1.suboffsets && !arraycmp(v1.suboffsets, v2.suboffsets, NULL,
  2142. v1.ndim))) {
  2143. goto result;
  2144. }
  2145. if (memcmp((char *)v1.buf, (char *)v2.buf, v1.len) != 0) {
  2146. goto result;
  2147. }
  2148. equal = 1;
  2149. result:
  2150. PyBuffer_Release(&v1);
  2151. PyBuffer_Release(&v2);
  2152. ret = equal ? Py_True : Py_False;
  2153. Py_INCREF(ret);
  2154. return ret;
  2155. }
  2156. static PyObject *
  2157. is_contiguous(PyObject *self, PyObject *args)
  2158. {
  2159. PyObject *obj;
  2160. PyObject *order;
  2161. PyObject *ret = NULL;
  2162. Py_buffer view, *base;
  2163. char ord;
  2164. if (!PyArg_ParseTuple(args, "OO", &obj, &order)) {
  2165. return NULL;
  2166. }
  2167. ord = get_ascii_order(order);
  2168. if (ord == CHAR_MAX) {
  2169. return NULL;
  2170. }
  2171. if (NDArray_Check(obj)) {
2172. /* Bypass the buffer protocol so that simple (NULL shape/strides) base buffers can be checked directly. */
  2173. base = &((NDArrayObject *)obj)->head->base;
  2174. ret = PyBuffer_IsContiguous(base, ord) ? Py_True : Py_False;
  2175. }
  2176. else {
  2177. if (PyObject_GetBuffer(obj, &view, PyBUF_FULL_RO) < 0) {
  2178. PyErr_SetString(PyExc_TypeError,
  2179. "is_contiguous: object does not implement the buffer "
  2180. "protocol");
  2181. return NULL;
  2182. }
  2183. ret = PyBuffer_IsContiguous(&view, ord) ? Py_True : Py_False;
  2184. PyBuffer_Release(&view);
  2185. }
  2186. Py_INCREF(ret);
  2187. return ret;
  2188. }
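/* Illustrative sketch (not used by this module): roughly the check that
   PyBuffer_IsContiguous(view, 'C') performs.  Any non-NULL suboffsets
   array counts as non-contiguous, NULL strides count as C-contiguous, and
   dimensions with fewer than two elements are ignored.  The helper is
   hypothetical. */
#if 0
static int
check_c_contiguous(const Py_buffer *view)
{
    Py_ssize_t expected, i;
    if (view->suboffsets != NULL)
        return 0;
    if (view->len == 0 || view->strides == NULL)
        return 1;
    expected = view->itemsize;
    for (i = view->ndim - 1; i >= 0; i--) {
        if (view->shape[i] > 1 && view->strides[i] != expected)
            return 0;
        expected *= view->shape[i];
    }
    return 1;
}
#endif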
  2189. static Py_hash_t
  2190. ndarray_hash(PyObject *self)
  2191. {
  2192. const NDArrayObject *nd = (NDArrayObject *)self;
  2193. const Py_buffer *view = &nd->head->base;
  2194. PyObject *bytes;
  2195. Py_hash_t hash;
  2196. if (!view->readonly) {
  2197. PyErr_SetString(PyExc_ValueError,
  2198. "cannot hash writable ndarray object");
  2199. return -1;
  2200. }
  2201. if (view->obj != NULL && PyObject_Hash(view->obj) == -1) {
  2202. return -1;
  2203. }
  2204. bytes = ndarray_tobytes(self, NULL);
  2205. if (bytes == NULL) {
  2206. return -1;
  2207. }
  2208. hash = PyObject_Hash(bytes);
  2209. Py_DECREF(bytes);
  2210. return hash;
  2211. }
  2212. static PyMethodDef ndarray_methods [] =
  2213. {
  2214. { "tolist", ndarray_tolist, METH_NOARGS, NULL },
  2215. { "tobytes", ndarray_tobytes, METH_NOARGS, NULL },
  2216. { "push", (PyCFunction)ndarray_push, METH_VARARGS|METH_KEYWORDS, NULL },
  2217. { "pop", ndarray_pop, METH_NOARGS, NULL },
  2218. { "add_suboffsets", ndarray_add_suboffsets, METH_NOARGS, NULL },
  2219. { "memoryview_from_buffer", ndarray_memoryview_from_buffer, METH_NOARGS, NULL },
  2220. {NULL}
  2221. };
  2222. static PyTypeObject NDArray_Type = {
  2223. PyVarObject_HEAD_INIT(NULL, 0)
  2224. "ndarray", /* Name of this type */
  2225. sizeof(NDArrayObject), /* Basic object size */
  2226. 0, /* Item size for varobject */
  2227. (destructor)ndarray_dealloc, /* tp_dealloc */
  2228. 0, /* tp_print */
  2229. 0, /* tp_getattr */
  2230. 0, /* tp_setattr */
  2231. 0, /* tp_compare */
  2232. 0, /* tp_repr */
  2233. 0, /* tp_as_number */
  2234. &ndarray_as_sequence, /* tp_as_sequence */
  2235. &ndarray_as_mapping, /* tp_as_mapping */
  2236. (hashfunc)ndarray_hash, /* tp_hash */
  2237. 0, /* tp_call */
  2238. 0, /* tp_str */
  2239. PyObject_GenericGetAttr, /* tp_getattro */
  2240. 0, /* tp_setattro */
  2241. &ndarray_as_buffer, /* tp_as_buffer */
  2242. Py_TPFLAGS_DEFAULT, /* tp_flags */
  2243. 0, /* tp_doc */
  2244. 0, /* tp_traverse */
  2245. 0, /* tp_clear */
  2246. 0, /* tp_richcompare */
  2247. 0, /* tp_weaklistoffset */
  2248. 0, /* tp_iter */
  2249. 0, /* tp_iternext */
  2250. ndarray_methods, /* tp_methods */
  2251. 0, /* tp_members */
  2252. ndarray_getset, /* tp_getset */
  2253. 0, /* tp_base */
  2254. 0, /* tp_dict */
  2255. 0, /* tp_descr_get */
  2256. 0, /* tp_descr_set */
  2257. 0, /* tp_dictoffset */
  2258. ndarray_init, /* tp_init */
  2259. 0, /* tp_alloc */
  2260. ndarray_new, /* tp_new */
  2261. };
  2262. /**************************************************************************/
  2263. /* StaticArray Object */
  2264. /**************************************************************************/
  2265. static PyTypeObject StaticArray_Type;
  2266. typedef struct {
  2267. PyObject_HEAD
  2268. int legacy_mode; /* if true, use the view.obj==NULL hack */
  2269. } StaticArrayObject;
  2270. static char static_mem[12] = {0,1,2,3,4,5,6,7,8,9,10,11};
  2271. static Py_ssize_t static_shape[1] = {12};
  2272. static Py_ssize_t static_strides[1] = {1};
  2273. static Py_buffer static_buffer = {
  2274. static_mem, /* buf */
  2275. NULL, /* obj */
  2276. 12, /* len */
  2277. 1, /* itemsize */
  2278. 1, /* readonly */
  2279. 1, /* ndim */
  2280. "B", /* format */
  2281. static_shape, /* shape */
  2282. static_strides, /* strides */
  2283. NULL, /* suboffsets */
  2284. NULL /* internal */
  2285. };
  2286. static PyObject *
  2287. staticarray_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
  2288. {
  2289. return (PyObject *)PyObject_New(StaticArrayObject, &StaticArray_Type);
  2290. }
  2291. static int
  2292. staticarray_init(PyObject *self, PyObject *args, PyObject *kwds)
  2293. {
  2294. StaticArrayObject *a = (StaticArrayObject *)self;
  2295. static char *kwlist[] = {
  2296. "legacy_mode", NULL
  2297. };
  2298. PyObject *legacy_mode = Py_False;
  2299. if (!PyArg_ParseTupleAndKeywords(args, kwds, "|O", kwlist, &legacy_mode))
  2300. return -1;
  2301. a->legacy_mode = (legacy_mode != Py_False);
  2302. return 0;
  2303. }
  2304. static void
  2305. staticarray_dealloc(StaticArrayObject *self)
  2306. {
  2307. PyObject_Del(self);
  2308. }
  2309. /* Return a buffer for a PyBUF_FULL_RO request. Flags are not checked,
  2310. which makes this object a non-compliant exporter! */
  2311. static int
  2312. staticarray_getbuf(StaticArrayObject *self, Py_buffer *view, int flags)
  2313. {
  2314. *view = static_buffer;
  2315. if (self->legacy_mode) {
  2316. view->obj = NULL; /* Don't use this in new code. */
  2317. }
  2318. else {
  2319. view->obj = (PyObject *)self;
  2320. Py_INCREF(view->obj);
  2321. }
  2322. return 0;
  2323. }
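/* Illustrative sketch (not used by this module): a flag-checking, compliant
   counterpart to staticarray_getbuf() above.  PyBuffer_FillInfo() validates
   the request (e.g. it rejects PyBUF_WRITABLE for read-only memory), fills
   in the simple one-dimensional fields and sets view->obj correctly. */
#if 0
static int
staticarray_getbuf_compliant(StaticArrayObject *self, Py_buffer *view,
                             int flags)
{
    return PyBuffer_FillInfo(view, (PyObject *)self, static_mem,
                             sizeof(static_mem), 1 /* readonly */, flags);
}
#endif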
  2324. static PyBufferProcs staticarray_as_buffer = {
  2325. (getbufferproc)staticarray_getbuf, /* bf_getbuffer */
  2326. NULL, /* bf_releasebuffer */
  2327. };
  2328. static PyTypeObject StaticArray_Type = {
  2329. PyVarObject_HEAD_INIT(NULL, 0)
  2330. "staticarray", /* Name of this type */
  2331. sizeof(StaticArrayObject), /* Basic object size */
  2332. 0, /* Item size for varobject */
  2333. (destructor)staticarray_dealloc, /* tp_dealloc */
  2334. 0, /* tp_print */
  2335. 0, /* tp_getattr */
  2336. 0, /* tp_setattr */
  2337. 0, /* tp_compare */
  2338. 0, /* tp_repr */
  2339. 0, /* tp_as_number */
  2340. 0, /* tp_as_sequence */
  2341. 0, /* tp_as_mapping */
  2342. 0, /* tp_hash */
  2343. 0, /* tp_call */
  2344. 0, /* tp_str */
  2345. 0, /* tp_getattro */
  2346. 0, /* tp_setattro */
  2347. &staticarray_as_buffer, /* tp_as_buffer */
  2348. Py_TPFLAGS_DEFAULT, /* tp_flags */
  2349. 0, /* tp_doc */
  2350. 0, /* tp_traverse */
  2351. 0, /* tp_clear */
  2352. 0, /* tp_richcompare */
  2353. 0, /* tp_weaklistoffset */
  2354. 0, /* tp_iter */
  2355. 0, /* tp_iternext */
  2356. 0, /* tp_methods */
  2357. 0, /* tp_members */
  2358. 0, /* tp_getset */
  2359. 0, /* tp_base */
  2360. 0, /* tp_dict */
  2361. 0, /* tp_descr_get */
  2362. 0, /* tp_descr_set */
  2363. 0, /* tp_dictoffset */
  2364. staticarray_init, /* tp_init */
  2365. 0, /* tp_alloc */
  2366. staticarray_new, /* tp_new */
  2367. };
  2368. static struct PyMethodDef _testbuffer_functions[] = {
  2369. {"slice_indices", slice_indices, METH_VARARGS, NULL},
  2370. {"get_pointer", get_pointer, METH_VARARGS, NULL},
  2371. {"get_sizeof_void_p", (PyCFunction)get_sizeof_void_p, METH_NOARGS, NULL},
  2372. {"get_contiguous", get_contiguous, METH_VARARGS, NULL},
  2373. {"py_buffer_to_contiguous", py_buffer_to_contiguous, METH_VARARGS, NULL},
  2374. {"is_contiguous", is_contiguous, METH_VARARGS, NULL},
  2375. {"cmp_contig", cmp_contig, METH_VARARGS, NULL},
  2376. {NULL, NULL}
  2377. };
  2378. static struct PyModuleDef _testbuffermodule = {
  2379. PyModuleDef_HEAD_INIT,
  2380. "_testbuffer",
  2381. NULL,
  2382. -1,
  2383. _testbuffer_functions,
  2384. NULL,
  2385. NULL,
  2386. NULL,
  2387. NULL
  2388. };
  2389. PyMODINIT_FUNC
  2390. PyInit__testbuffer(void)
  2391. {
  2392. PyObject *m;
  2393. m = PyModule_Create(&_testbuffermodule);
  2394. if (m == NULL)
  2395. return NULL;
  2396. Py_TYPE(&NDArray_Type) = &PyType_Type;
  2397. Py_INCREF(&NDArray_Type);
  2398. PyModule_AddObject(m, "ndarray", (PyObject *)&NDArray_Type);
  2399. Py_TYPE(&StaticArray_Type) = &PyType_Type;
  2400. Py_INCREF(&StaticArray_Type);
  2401. PyModule_AddObject(m, "staticarray", (PyObject *)&StaticArray_Type);
  2402. structmodule = PyImport_ImportModule("struct");
  2403. if (structmodule == NULL)
  2404. return NULL;
  2405. Struct = PyObject_GetAttrString(structmodule, "Struct");
  2406. calcsize = PyObject_GetAttrString(structmodule, "calcsize");
  2407. if (Struct == NULL || calcsize == NULL)
  2408. return NULL;
  2409. simple_format = PyUnicode_FromString(simple_fmt);
  2410. if (simple_format == NULL)
  2411. return NULL;
  2412. PyModule_AddIntMacro(m, ND_MAX_NDIM);
  2413. PyModule_AddIntMacro(m, ND_VAREXPORT);
  2414. PyModule_AddIntMacro(m, ND_WRITABLE);
  2415. PyModule_AddIntMacro(m, ND_FORTRAN);
  2416. PyModule_AddIntMacro(m, ND_SCALAR);
  2417. PyModule_AddIntMacro(m, ND_PIL);
  2418. PyModule_AddIntMacro(m, ND_GETBUF_FAIL);
  2419. PyModule_AddIntMacro(m, ND_GETBUF_UNDEFINED);
  2420. PyModule_AddIntMacro(m, ND_REDIRECT);
  2421. PyModule_AddIntMacro(m, PyBUF_SIMPLE);
  2422. PyModule_AddIntMacro(m, PyBUF_WRITABLE);
  2423. PyModule_AddIntMacro(m, PyBUF_FORMAT);
  2424. PyModule_AddIntMacro(m, PyBUF_ND);
  2425. PyModule_AddIntMacro(m, PyBUF_STRIDES);
  2426. PyModule_AddIntMacro(m, PyBUF_INDIRECT);
  2427. PyModule_AddIntMacro(m, PyBUF_C_CONTIGUOUS);
  2428. PyModule_AddIntMacro(m, PyBUF_F_CONTIGUOUS);
  2429. PyModule_AddIntMacro(m, PyBUF_ANY_CONTIGUOUS);
  2430. PyModule_AddIntMacro(m, PyBUF_FULL);
  2431. PyModule_AddIntMacro(m, PyBUF_FULL_RO);
  2432. PyModule_AddIntMacro(m, PyBUF_RECORDS);
  2433. PyModule_AddIntMacro(m, PyBUF_RECORDS_RO);
  2434. PyModule_AddIntMacro(m, PyBUF_STRIDED);
  2435. PyModule_AddIntMacro(m, PyBUF_STRIDED_RO);
  2436. PyModule_AddIntMacro(m, PyBUF_CONTIG);
  2437. PyModule_AddIntMacro(m, PyBUF_CONTIG_RO);
  2438. PyModule_AddIntMacro(m, PyBUF_READ);
  2439. PyModule_AddIntMacro(m, PyBUF_WRITE);
  2440. return m;
  2441. }