1
2 /* Tuple object implementation */
3
4 #include "Python.h"
5 #include "accu.h"
6
7 /* Speed optimization to avoid frequent malloc/free of small tuples */
8 #ifndef PyTuple_MAXSAVESIZE
9 #define PyTuple_MAXSAVESIZE 20 /* Largest tuple to save on free list */
10 #endif
11 #ifndef PyTuple_MAXFREELIST
12 #define PyTuple_MAXFREELIST 2000 /* Maximum number of tuples of each size to save */
13 #endif
14
15 #if PyTuple_MAXSAVESIZE > 0
16 /* Entries 1 up to PyTuple_MAXSAVESIZE are free lists, entry 0 is the empty
17 tuple () of which at most one instance will be allocated.
18 */
19 static PyTupleObject *free_list[PyTuple_MAXSAVESIZE];
20 static int numfree[PyTuple_MAXSAVESIZE];
21 #endif
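/* Illustrative sketch (not part of the implementation): the free lists are
   threaded through ob_item[0], so each free_list[n] acts as a singly linked
   stack of discarded n-tuples.  The push/pop idiom used by tupledealloc()
   and PyTuple_New() below looks roughly like this for a tuple "op" of size n:

       op->ob_item[0] = (PyObject *) free_list[n];      // push on dealloc
       free_list[n] = op;
       numfree[n]++;

       op = free_list[n];                               // pop on allocation
       free_list[n] = (PyTupleObject *) op->ob_item[0];
       numfree[n]--;
*/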
22 #ifdef COUNT_ALLOCS
23 Py_ssize_t fast_tuple_allocs;
24 Py_ssize_t tuple_zero_allocs;
25 #endif
26
27 /* Debug statistic to count GC tracking of tuples.
28 Please note that tuples are only untracked when considered by the GC, and
29 many of them will be dead before that point. Therefore, a tracking rate close to 100%
30 does not necessarily prove that the heuristic is inefficient.
31 */
32 #ifdef SHOW_TRACK_COUNT
33 static Py_ssize_t count_untracked = 0;
34 static Py_ssize_t count_tracked = 0;
35
36 static void
37 show_track(void)
38 {
39 PyObject *xoptions, *value;
40 _Py_IDENTIFIER(showalloccount);
41
42 xoptions = PySys_GetXOptions();
43 if (xoptions == NULL)
44 return;
45 value = _PyDict_GetItemId(xoptions, &PyId_showalloccount);
46 if (value != Py_True)
47 return;
48
49 fprintf(stderr, "Tuples created: %" PY_FORMAT_SIZE_T "d\n",
50 count_tracked + count_untracked);
51 fprintf(stderr, "Tuples tracked by the GC: %" PY_FORMAT_SIZE_T
52 "d\n", count_tracked);
53 fprintf(stderr, "%.2f%% tuple tracking rate\n\n",
54 (100.0*count_tracked/(count_untracked+count_tracked)));
55 }
56 #endif
57
58 /* Print summary info about the state of the optimized allocator */
59 void
60 _PyTuple_DebugMallocStats(FILE *out)
61 {
62 #if PyTuple_MAXSAVESIZE > 0
63 int i;
64 char buf[128];
65 for (i = 1; i < PyTuple_MAXSAVESIZE; i++) {
66 PyOS_snprintf(buf, sizeof(buf),
67 "free %d-sized PyTupleObject", i);
68 _PyDebugAllocatorStats(out,
69 buf,
70 numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i));
71 }
72 #endif
73 }
74
75 PyObject *
76 PyTuple_New(Py_ssize_t size)
77 {
78 PyTupleObject *op;
79 Py_ssize_t i;
80 if (size < 0) {
81 PyErr_BadInternalCall();
82 return NULL;
83 }
84 #if PyTuple_MAXSAVESIZE > 0
85 if (size == 0 && free_list[0]) {
86 op = free_list[0];
87 Py_INCREF(op);
88 #ifdef COUNT_ALLOCS
89 tuple_zero_allocs++;
90 #endif
91 return (PyObject *) op;
92 }
93 if (size < PyTuple_MAXSAVESIZE && (op = free_list[size]) != NULL) {
94 free_list[size] = (PyTupleObject *) op->ob_item[0];
95 numfree[size]--;
96 #ifdef COUNT_ALLOCS
97 fast_tuple_allocs++;
98 #endif
99 /* Inline PyObject_InitVar */
100 #ifdef Py_TRACE_REFS
101 Py_SIZE(op) = size;
102 Py_TYPE(op) = &PyTuple_Type;
103 #endif
104 _Py_NewReference((PyObject *)op);
105 }
106 else
107 #endif
108 {
109 /* Check for overflow */
110 if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - sizeof(PyTupleObject) -
111 sizeof(PyObject *)) / sizeof(PyObject *)) {
112 return PyErr_NoMemory();
113 }
114 op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
115 if (op == NULL)
116 return NULL;
117 }
118 for (i=0; i < size; i++)
119 op->ob_item[i] = NULL;
120 #if PyTuple_MAXSAVESIZE > 0
121 if (size == 0) {
122 free_list[0] = op;
123 ++numfree[0];
124 Py_INCREF(op); /* extra INCREF so that this is never freed */
125 }
126 #endif
127 #ifdef SHOW_TRACK_COUNT
128 count_tracked++;
129 #endif
130 _PyObject_GC_TRACK(op);
131 return (PyObject *) op;
132 }
133
134 Py_ssize_t
135 PyTuple_Size(PyObject *op)
136 {
137 if (!PyTuple_Check(op)) {
138 PyErr_BadInternalCall();
139 return -1;
140 }
141 else
142 return Py_SIZE(op);
143 }
144
145 PyObject *
146 PyTuple_GetItem(PyObject *op, Py_ssize_t i)
147 {
148 if (!PyTuple_Check(op)) {
149 PyErr_BadInternalCall();
150 return NULL;
151 }
152 if (i < 0 || i >= Py_SIZE(op)) {
153 PyErr_SetString(PyExc_IndexError, "tuple index out of range");
154 return NULL;
155 }
156 return ((PyTupleObject *)op) -> ob_item[i];
157 }
158
159 int
160 PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
161 {
162 PyObject **p;
163 if (!PyTuple_Check(op) || op->ob_refcnt != 1) {
164 Py_XDECREF(newitem);
165 PyErr_BadInternalCall();
166 return -1;
167 }
168 if (i < 0 || i >= Py_SIZE(op)) {
169 Py_XDECREF(newitem);
170 PyErr_SetString(PyExc_IndexError,
171 "tuple assignment index out of range");
172 return -1;
173 }
174 p = ((PyTupleObject *)op) -> ob_item + i;
175 Py_XSETREF(*p, newitem);
176 return 0;
177 }
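/* Usage sketch (illustrative only, not part of this file): PyTuple_SetItem
   steals the reference to newitem and requires that the caller hold the only
   reference to the tuple, so it is normally used right after PyTuple_New():

       PyObject *t = PyTuple_New(2);
       if (t == NULL)
           return NULL;
       PyTuple_SetItem(t, 0, PyLong_FromLong(1));           // reference stolen
       PyTuple_SetItem(t, 1, PyUnicode_FromString("two"));  // reference stolen
       // t is now (1, 'two'); error handling elided for brevity
*/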
178
179 void
180 _PyTuple_MaybeUntrack(PyObject *op)
181 {
182 PyTupleObject *t;
183 Py_ssize_t i, n;
184
185 if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
186 return;
187 t = (PyTupleObject *) op;
188 n = Py_SIZE(t);
189 for (i = 0; i < n; i++) {
190 PyObject *elt = PyTuple_GET_ITEM(t, i);
191 /* Tuples with NULL elements aren't
192 fully constructed; don't untrack
193 them yet. */
194 if (!elt ||
195 _PyObject_GC_MAY_BE_TRACKED(elt))
196 return;
197 }
198 #ifdef SHOW_TRACK_COUNT
199 count_tracked--;
200 count_untracked++;
201 #endif
202 _PyObject_GC_UNTRACK(op);
203 }
204
205 PyObject *
206 PyTuple_Pack(Py_ssize_t n, ...)
207 {
208 Py_ssize_t i;
209 PyObject *o;
210 PyObject *result;
211 PyObject **items;
212 va_list vargs;
213
214 va_start(vargs, n);
215 result = PyTuple_New(n);
216 if (result == NULL) {
217 va_end(vargs);
218 return NULL;
219 }
220 items = ((PyTupleObject *)result)->ob_item;
221 for (i = 0; i < n; i++) {
222 o = va_arg(vargs, PyObject *);
223 Py_INCREF(o);
224 items[i] = o;
225 }
226 va_end(vargs);
227 return result;
228 }
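/* Usage sketch (illustrative only): unlike PyTuple_SetItem above,
   PyTuple_Pack does not steal references; as the loop above shows, every
   argument is INCREF'ed, so the caller keeps its own references.  With
   placeholder objects a and b:

       PyObject *pair = PyTuple_Pack(2, a, b);   // new reference, or NULL
       Py_XDECREF(pair);                         // a and b are unaffected
*/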
229
230
231 /* Methods */
232
233 static void
234 tupledealloc(PyTupleObject *op)
235 {
236 Py_ssize_t i;
237 Py_ssize_t len = Py_SIZE(op);
238 PyObject_GC_UnTrack(op);
239 Py_TRASHCAN_SAFE_BEGIN(op)
240 if (len > 0) {
241 i = len;
242 while (--i >= 0)
243 Py_XDECREF(op->ob_item[i]);
244 #if PyTuple_MAXSAVESIZE > 0
245 if (len < PyTuple_MAXSAVESIZE &&
246 numfree[len] < PyTuple_MAXFREELIST &&
247 Py_TYPE(op) == &PyTuple_Type)
248 {
249 op->ob_item[0] = (PyObject *) free_list[len];
250 numfree[len]++;
251 free_list[len] = op;
252 goto done; /* return */
253 }
254 #endif
255 }
256 Py_TYPE(op)->tp_free((PyObject *)op);
257 done:
258 Py_TRASHCAN_SAFE_END(op)
259 }
260
261 static PyObject *
262 tuplerepr(PyTupleObject *v)
263 {
264 Py_ssize_t i, n;
265 _PyUnicodeWriter writer;
266
267 n = Py_SIZE(v);
268 if (n == 0)
269 return PyUnicode_FromString("()");
270
271 /* While not mutable, it is still possible to end up with a cycle in a
272 tuple through an object that stores itself within a tuple (and thus
273 infinitely asks for the repr of itself). This should only be
274 possible within a type. */
275 i = Py_ReprEnter((PyObject *)v);
276 if (i != 0) {
277 return i > 0 ? PyUnicode_FromString("(...)") : NULL;
278 }
279
280 _PyUnicodeWriter_Init(&writer);
281 writer.overallocate = 1;
282 if (Py_SIZE(v) > 1) {
283 /* "(" + "1" + ", 2" * (len - 1) + ")" */
284 writer.min_length = 1 + 1 + (2 + 1) * (Py_SIZE(v) - 1) + 1;
285 }
286 else {
287 /* "(1,)" */
288 writer.min_length = 4;
289 }
290
291 if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0)
292 goto error;
293
294 /* Do repr() on each element. */
295 for (i = 0; i < n; ++i) {
296 PyObject *s;
297
298 if (i > 0) {
299 if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0)
300 goto error;
301 }
302
303 if (Py_EnterRecursiveCall(" while getting the repr of a tuple"))
304 goto error;
305 s = PyObject_Repr(v->ob_item[i]);
306 Py_LeaveRecursiveCall();
307 if (s == NULL)
308 goto error;
309
310 if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
311 Py_DECREF(s);
312 goto error;
313 }
314 Py_DECREF(s);
315 }
316
317 writer.overallocate = 0;
318 if (n > 1) {
319 if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0)
320 goto error;
321 }
322 else {
323 if (_PyUnicodeWriter_WriteASCIIString(&writer, ",)", 2) < 0)
324 goto error;
325 }
326
327 Py_ReprLeave((PyObject *)v);
328 return _PyUnicodeWriter_Finish(&writer);
329
330 error:
331 _PyUnicodeWriter_Dealloc(&writer);
332 Py_ReprLeave((PyObject *)v);
333 return NULL;
334 }
335
336 /* The addend 82520 was selected from the range(0, 1000000) for
337 generating the greatest number of prime multipliers for tuples
338 up to length eight:
339
340 1082527, 1165049, 1082531, 1165057, 1247581, 1330103, 1082533,
341 1330111, 1412633, 1165069, 1247599, 1495177, 1577699
342
343 Tests have shown that it's not worth caching the hash value, see
344 issue #9685.
345 */
346
347 static Py_hash_t
348 tuplehash(PyTupleObject *v)
349 {
350 Py_uhash_t x; /* Unsigned for defined overflow behavior. */
351 Py_hash_t y;
352 Py_ssize_t len = Py_SIZE(v);
353 PyObject **p;
354 Py_uhash_t mult = _PyHASH_MULTIPLIER;
355 x = 0x345678UL;
356 p = v->ob_item;
357 while (--len >= 0) {
358 y = PyObject_Hash(*p++);
359 if (y == -1)
360 return -1;
361 x = (x ^ y) * mult;
362 /* the cast might truncate len; that doesn't change hash stability */
363 mult += (Py_hash_t)(82520UL + len + len);
364 }
365 x += 97531UL;
366 if (x == (Py_uhash_t)-1)
367 x = -2;
368 return x;
369 }
370
371 static Py_ssize_t
372 tuplelength(PyTupleObject *a)
373 {
374 return Py_SIZE(a);
375 }
376
377 static int
378 tuplecontains(PyTupleObject *a, PyObject *el)
379 {
380 Py_ssize_t i;
381 int cmp;
382
383 for (i = 0, cmp = 0 ; cmp == 0 && i < Py_SIZE(a); ++i)
384 cmp = PyObject_RichCompareBool(el, PyTuple_GET_ITEM(a, i),
385 Py_EQ);
386 return cmp;
387 }
388
389 static PyObject *
390 tupleitem(PyTupleObject *a, Py_ssize_t i)
391 {
392 if (i < 0 || i >= Py_SIZE(a)) {
393 PyErr_SetString(PyExc_IndexError, "tuple index out of range");
394 return NULL;
395 }
396 Py_INCREF(a->ob_item[i]);
397 return a->ob_item[i];
398 }
399
400 static PyObject *
401 tupleslice(PyTupleObject *a, Py_ssize_t ilow,
402            Py_ssize_t ihigh)
403 {
404 PyTupleObject *np;
405 PyObject **src, **dest;
406 Py_ssize_t i;
407 Py_ssize_t len;
408 if (ilow < 0)
409 ilow = 0;
410 if (ihigh > Py_SIZE(a))
411 ihigh = Py_SIZE(a);
412 if (ihigh < ilow)
413 ihigh = ilow;
414 if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
415 Py_INCREF(a);
416 return (PyObject *)a;
417 }
418 len = ihigh - ilow;
419 np = (PyTupleObject *)PyTuple_New(len);
420 if (np == NULL)
421 return NULL;
422 src = a->ob_item + ilow;
423 dest = np->ob_item;
424 for (i = 0; i < len; i++) {
425 PyObject *v = src[i];
426 Py_INCREF(v);
427 dest[i] = v;
428 }
429 return (PyObject *)np;
430 }
431
432 PyObject *
433 PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
434 {
435 if (op == NULL || !PyTuple_Check(op)) {
436 PyErr_BadInternalCall();
437 return NULL;
438 }
439 return tupleslice((PyTupleObject *)op, i, j);
440 }
441
442 static PyObject *
443 tupleconcat(PyTupleObject *a, PyObject *bb)
444 {
445 Py_ssize_t size;
446 Py_ssize_t i;
447 PyObject **src, **dest;
448 PyTupleObject *np;
449 if (!PyTuple_Check(bb)) {
450 PyErr_Format(PyExc_TypeError,
451 "can only concatenate tuple (not \"%.200s\") to tuple",
452 Py_TYPE(bb)->tp_name);
453 return NULL;
454 }
455 #define b ((PyTupleObject *)bb)
456 if (Py_SIZE(a) > PY_SSIZE_T_MAX - Py_SIZE(b))
457 return PyErr_NoMemory();
458 size = Py_SIZE(a) + Py_SIZE(b);
459 np = (PyTupleObject *) PyTuple_New(size);
460 if (np == NULL) {
461 return NULL;
462 }
463 src = a->ob_item;
464 dest = np->ob_item;
465 for (i = 0; i < Py_SIZE(a); i++) {
466 PyObject *v = src[i];
467 Py_INCREF(v);
468 dest[i] = v;
469 }
470 src = b->ob_item;
471 dest = np->ob_item + Py_SIZE(a);
472 for (i = 0; i < Py_SIZE(b); i++) {
473 PyObject *v = src[i];
474 Py_INCREF(v);
475 dest[i] = v;
476 }
477 return (PyObject *)np;
478 #undef b
479 }
480
481 static PyObject *
482 tuplerepeat(PyTupleObject *a, Py_ssize_t n)
483 {
484 Py_ssize_t i, j;
485 Py_ssize_t size;
486 PyTupleObject *np;
487 PyObject **p, **items;
488 if (n < 0)
489 n = 0;
490 if (Py_SIZE(a) == 0 || n == 1) {
491 if (PyTuple_CheckExact(a)) {
492 /* Since tuples are immutable, we can return a shared
493 copy in this case */
494 Py_INCREF(a);
495 return (PyObject *)a;
496 }
497 if (Py_SIZE(a) == 0)
498 return PyTuple_New(0);
499 }
500 if (n > PY_SSIZE_T_MAX / Py_SIZE(a))
501 return PyErr_NoMemory();
502 size = Py_SIZE(a) * n;
503 np = (PyTupleObject *) PyTuple_New(size);
504 if (np == NULL)
505 return NULL;
506 p = np->ob_item;
507 items = a->ob_item;
508 for (i = 0; i < n; i++) {
509 for (j = 0; j < Py_SIZE(a); j++) {
510 *p = items[j];
511 Py_INCREF(*p);
512 p++;
513 }
514 }
515 return (PyObject *) np;
516 }
517
518 static PyObject *
519 tupleindex(PyTupleObject *self, PyObject *args)
520 {
521 Py_ssize_t i, start=0, stop=Py_SIZE(self);
522 PyObject *v;
523
524 if (!PyArg_ParseTuple(args, "O|O&O&:index", &v,
525 _PyEval_SliceIndex, &start,
526 _PyEval_SliceIndex, &stop))
527 return NULL;
528 if (start < 0) {
529 start += Py_SIZE(self);
530 if (start < 0)
531 start = 0;
532 }
533 if (stop < 0) {
534 stop += Py_SIZE(self);
535 if (stop < 0)
536 stop = 0;
537 }
538 for (i = start; i < stop && i < Py_SIZE(self); i++) {
539 int cmp = PyObject_RichCompareBool(self->ob_item[i], v, Py_EQ);
540 if (cmp > 0)
541 return PyLong_FromSsize_t(i);
542 else if (cmp < 0)
543 return NULL;
544 }
545 PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
546 return NULL;
547 }
548
549 static PyObject *
550 tuplecount(PyTupleObject *self, PyObject *v)
551 {
552 Py_ssize_t count = 0;
553 Py_ssize_t i;
554
555 for (i = 0; i < Py_SIZE(self); i++) {
556 int cmp = PyObject_RichCompareBool(self->ob_item[i], v, Py_EQ);
557 if (cmp > 0)
558 count++;
559 else if (cmp < 0)
560 return NULL;
561 }
562 return PyLong_FromSsize_t(count);
563 }
564
565 static int
566 tupletraverse(PyTupleObject *o, visitproc visit, void *arg)
567 {
568 Py_ssize_t i;
569
570 for (i = Py_SIZE(o); --i >= 0; )
571 Py_VISIT(o->ob_item[i]);
572 return 0;
573 }
574
575 static PyObject *
576 tuplerichcompare(PyObject *v, PyObject *w, int op)
577 {
578 PyTupleObject *vt, *wt;
579 Py_ssize_t i;
580 Py_ssize_t vlen, wlen;
581
582 if (!PyTuple_Check(v) || !PyTuple_Check(w))
583 Py_RETURN_NOTIMPLEMENTED;
584
585 vt = (PyTupleObject *)v;
586 wt = (PyTupleObject *)w;
587
588 vlen = Py_SIZE(vt);
589 wlen = Py_SIZE(wt);
590
591 /* Note: the corresponding code for lists has an "early out" test
592 * here when op is EQ or NE and the lengths differ. That pays there,
593 * but Tim was unable to find any real code where EQ/NE tuple
594 * compares don't have the same length, so testing for it here would
595 * have cost without benefit.
596 */
597
598 /* Search for the first index where items are different.
599 * Note that because tuples are immutable, it's safe to reuse
600 * vlen and wlen across the comparison calls.
601 */
602 for (i = 0; i < vlen && i < wlen; i++) {
603 int k = PyObject_RichCompareBool(vt->ob_item[i],
604 wt->ob_item[i], Py_EQ);
605 if (k < 0)
606 return NULL;
607 if (!k)
608 break;
609 }
610
611 if (i >= vlen || i >= wlen) {
612 /* No more items to compare -- compare sizes */
613 int cmp;
614 PyObject *res;
615 switch (op) {
616 case Py_LT: cmp = vlen < wlen; break;
617 case Py_LE: cmp = vlen <= wlen; break;
618 case Py_EQ: cmp = vlen == wlen; break;
619 case Py_NE: cmp = vlen != wlen; break;
620 case Py_GT: cmp = vlen > wlen; break;
621 case Py_GE: cmp = vlen >= wlen; break;
622 default: return NULL; /* cannot happen */
623 }
624 if (cmp)
625 res = Py_True;
626 else
627 res = Py_False;
628 Py_INCREF(res);
629 return res;
630 }
631
632 /* We have an item that differs -- shortcuts for EQ/NE */
633 if (op == Py_EQ) {
634 Py_INCREF(Py_False);
635 return Py_False;
636 }
637 if (op == Py_NE) {
638 Py_INCREF(Py_True);
639 return Py_True;
640 }
641
642 /* Compare the final item again using the proper operator */
643 return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
644 }
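/* Illustrative examples of the lexicographic ordering implemented above
   (Python-level behaviour):

       (1, 2, 3) <  (1, 2, 4)     first differing item decides
       (1, 2)    <  (1, 2, 3)     equal prefix: the shorter tuple is smaller
       (1, 2, 3) == (1, 2, 3)     all items equal and same length
*/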
645
646 static PyObject *
647 tuple_subtype_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
648
649 static PyObject *
650 tuple_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
651 {
652 PyObject *arg = NULL;
653 static char *kwlist[] = {"sequence", 0};
654
655 if (type != &PyTuple_Type)
656 return tuple_subtype_new(type, args, kwds);
657 if (!PyArg_ParseTupleAndKeywords(args, kwds, "|O:tuple", kwlist, &arg))
658 return NULL;
659
660 if (arg == NULL)
661 return PyTuple_New(0);
662 else
663 return PySequence_Tuple(arg);
664 }
665
666 static PyObject *
667 tuple_subtype_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
668 {
669 PyObject *tmp, *newobj, *item;
670 Py_ssize_t i, n;
671
672 assert(PyType_IsSubtype(type, &PyTuple_Type));
673 tmp = tuple_new(&PyTuple_Type, args, kwds);
674 if (tmp == NULL)
675 return NULL;
676 assert(PyTuple_Check(tmp));
677 newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
678 if (newobj == NULL)
679 return NULL;
680 for (i = 0; i < n; i++) {
681 item = PyTuple_GET_ITEM(tmp, i);
682 Py_INCREF(item);
683 PyTuple_SET_ITEM(newobj, i, item);
684 }
685 Py_DECREF(tmp);
686 return newobj;
687 }
688
689 PyDoc_STRVAR(tuple_doc,
690 "tuple() -> empty tuple\n\
691 tuple(iterable) -> tuple initialized from iterable's items\n\
692 \n\
693 If the argument is a tuple, the return value is the same object.");
694
695 static PySequenceMethods tuple_as_sequence = {
696 (lenfunc)tuplelength, /* sq_length */
697 (binaryfunc)tupleconcat, /* sq_concat */
698 (ssizeargfunc)tuplerepeat, /* sq_repeat */
699 (ssizeargfunc)tupleitem, /* sq_item */
700 0, /* sq_slice */
701 0, /* sq_ass_item */
702 0, /* sq_ass_slice */
703 (objobjproc)tuplecontains, /* sq_contains */
704 };
705
706 static PyObject*
707 tuplesubscript(PyTupleObject* self, PyObject* item)
708 {
709 if (PyIndex_Check(item)) {
710 Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
711 if (i == -1 && PyErr_Occurred())
712 return NULL;
713 if (i < 0)
714 i += PyTuple_GET_SIZE(self);
715 return tupleitem(self, i);
716 }
717 else if (PySlice_Check(item)) {
718 Py_ssize_t start, stop, step, slicelength, cur, i;
719 PyObject* result;
720 PyObject* it;
721 PyObject **src, **dest;
722
723 if (PySlice_GetIndicesEx(item,
724 PyTuple_GET_SIZE(self),
725 &start, &stop, &step, &slicelength) < 0) {
726 return NULL;
727 }
728
729 if (slicelength <= 0) {
730 return PyTuple_New(0);
731 }
732 else if (start == 0 && step == 1 &&
733 slicelength == PyTuple_GET_SIZE(self) &&
734 PyTuple_CheckExact(self)) {
735 Py_INCREF(self);
736 return (PyObject *)self;
737 }
738 else {
739 result = PyTuple_New(slicelength);
740 if (!result) return NULL;
741
742 src = self->ob_item;
743 dest = ((PyTupleObject *)result)->ob_item;
744 for (cur = start, i = 0; i < slicelength;
745 cur += step, i++) {
746 it = src[cur];
747 Py_INCREF(it);
748 dest[i] = it;
749 }
750
751 return result;
752 }
753 }
754 else {
755 PyErr_Format(PyExc_TypeError,
756 "tuple indices must be integers or slices, not %.200s",
757 Py_TYPE(item)->tp_name);
758 return NULL;
759 }
760 }
761
762 static PyObject *
763 tuple_getnewargs(PyTupleObject *v)
764 {
765 return Py_BuildValue("(N)", tupleslice(v, 0, Py_SIZE(v)));
766
767 }
768
769 PyDoc_STRVAR(index_doc,
770 "T.index(value, [start, [stop]]) -> integer -- return first index of value.\n"
771 "Raises ValueError if the value is not present."
772 );
773 PyDoc_STRVAR(count_doc,
774 "T.count(value) -> integer -- return number of occurrences of value");
775
776 static PyMethodDef tuple_methods[] = {
777 {"__getnewargs__", (PyCFunction)tuple_getnewargs, METH_NOARGS},
778 {"index", (PyCFunction)tupleindex, METH_VARARGS, index_doc},
779 {"count", (PyCFunction)tuplecount, METH_O, count_doc},
780 {NULL, NULL} /* sentinel */
781 };
782
783 static PyMappingMethods tuple_as_mapping = {
784 (lenfunc)tuplelength,
785 (binaryfunc)tuplesubscript,
786 0
787 };
788
789 static PyObject *tuple_iter(PyObject *seq);
790
791 PyTypeObject PyTuple_Type = {
792 PyVarObject_HEAD_INIT(&PyType_Type, 0)
793 "tuple",
794 sizeof(PyTupleObject) - sizeof(PyObject *),
795 sizeof(PyObject *),
796 (destructor)tupledealloc, /* tp_dealloc */
797 0, /* tp_print */
798 0, /* tp_getattr */
799 0, /* tp_setattr */
800 0, /* tp_reserved */
801 (reprfunc)tuplerepr, /* tp_repr */
802 0, /* tp_as_number */
803 &tuple_as_sequence, /* tp_as_sequence */
804 &tuple_as_mapping, /* tp_as_mapping */
805 (hashfunc)tuplehash, /* tp_hash */
806 0, /* tp_call */
807 0, /* tp_str */
808 PyObject_GenericGetAttr, /* tp_getattro */
809 0, /* tp_setattro */
810 0, /* tp_as_buffer */
811 Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
812 Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS, /* tp_flags */
813 tuple_doc, /* tp_doc */
814 (traverseproc)tupletraverse, /* tp_traverse */
815 0, /* tp_clear */
816 tuplerichcompare, /* tp_richcompare */
817 0, /* tp_weaklistoffset */
818 tuple_iter, /* tp_iter */
819 0, /* tp_iternext */
820 tuple_methods, /* tp_methods */
821 0, /* tp_members */
822 0, /* tp_getset */
823 0, /* tp_base */
824 0, /* tp_dict */
825 0, /* tp_descr_get */
826 0, /* tp_descr_set */
827 0, /* tp_dictoffset */
828 0, /* tp_init */
829 0, /* tp_alloc */
830 tuple_new, /* tp_new */
831 PyObject_GC_Del, /* tp_free */
832 };
833
834 /* The following function breaks the notion that tuples are immutable:
835 it changes the size of a tuple. We get away with this only if there
836 is only one reference to the object. You can also think of it
837 as creating a new tuple object and destroying the old one, only more
838 efficiently. In any case, don't use this if the tuple may already be
839 known to some other part of the code. */
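/* Usage sketch (illustrative only; the variables t, allocated, used and item
   are placeholders, not part of this file).  _PyTuple_Resize is typically
   used when a tuple is built incrementally and the final length is unknown:

       if (used == allocated) {
           allocated += allocated;              // grow geometrically
           if (_PyTuple_Resize(&t, allocated) < 0)
               return NULL;                     // on failure t is freed and set to NULL
       }
       PyTuple_SET_ITEM(t, used++, item);       // fill the next slot
       // ...repeat for each produced item, then:
       if (_PyTuple_Resize(&t, used) < 0)       // shrink to the final size
           return NULL;
*/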
840
841 int
842 _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
843 {
844 PyTupleObject *v;
845 PyTupleObject *sv;
846 Py_ssize_t i;
847 Py_ssize_t oldsize;
848
849 v = (PyTupleObject *) *pv;
850 if (v == NULL || Py_TYPE(v) != &PyTuple_Type ||
851 (Py_SIZE(v) != 0 && Py_REFCNT(v) != 1)) {
852 *pv = 0;
853 Py_XDECREF(v);
854 PyErr_BadInternalCall();
855 return -1;
856 }
857 oldsize = Py_SIZE(v);
858 if (oldsize == newsize)
859 return 0;
860
861 if (oldsize == 0) {
862 /* Empty tuples are often shared, so we should never
863 resize them in-place even if we do own the only
864 (current) reference */
865 Py_DECREF(v);
866 *pv = PyTuple_New(newsize);
867 return *pv == NULL ? -1 : 0;
868 }
869
870 /* XXX UNREF/NEWREF interface should be more symmetrical */
871 _Py_DEC_REFTOTAL;
872 if (_PyObject_GC_IS_TRACKED(v))
873 _PyObject_GC_UNTRACK(v);
874 _Py_ForgetReference((PyObject *) v);
875 /* DECREF items deleted by shrinkage */
876 for (i = newsize; i < oldsize; i++) {
877 Py_CLEAR(v->ob_item[i]);
878 }
879 sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
880 if (sv == NULL) {
881 *pv = NULL;
882 PyObject_GC_Del(v);
883 return -1;
884 }
885 _Py_NewReference((PyObject *) sv);
886 /* Zero out items added by growing */
887 if (newsize > oldsize)
888 memset(&sv->ob_item[oldsize], 0,
889 sizeof(*sv->ob_item) * (newsize - oldsize));
890 *pv = (PyObject *) sv;
891 _PyObject_GC_TRACK(sv);
892 return 0;
893 }
894
895 int
896 PyTuple_ClearFreeList(void)
897 {
898 int freelist_size = 0;
899 #if PyTuple_MAXSAVESIZE > 0
900 int i;
901 for (i = 1; i < PyTuple_MAXSAVESIZE; i++) {
902 PyTupleObject *p, *q;
903 p = free_list[i];
904 freelist_size += numfree[i];
905 free_list[i] = NULL;
906 numfree[i] = 0;
907 while (p) {
908 q = p;
909 p = (PyTupleObject *)(p->ob_item[0]);
910 PyObject_GC_Del(q);
911 }
912 }
913 #endif
914 return freelist_size;
915 }
916
917 void
918 PyTuple_Fini(void)
919 {
920 #if PyTuple_MAXSAVESIZE > 0
921 /* empty tuples are used all over the place and applications may
922 * rely on the fact that an empty tuple is a singleton. */
923 Py_CLEAR(free_list[0]);
924
925 (void)PyTuple_ClearFreeList();
926 #endif
927 #ifdef SHOW_TRACK_COUNT
928 show_track();
929 #endif
930 }
931
932 /*********************** Tuple Iterator **************************/
933
934 typedef struct {
935 PyObject_HEAD
936 Py_ssize_t it_index;
937 PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
938 } tupleiterobject;
939
940 static void
941 tupleiter_dealloc(tupleiterobject *it)
942 {
943 _PyObject_GC_UNTRACK(it);
944 Py_XDECREF(it->it_seq);
945 PyObject_GC_Del(it);
946 }
947
948 static int
949 tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg)
950 {
951 Py_VISIT(it->it_seq);
952 return 0;
953 }
954
955 static PyObject *
956 tupleiter_next(tupleiterobject *it)
957 {
958 PyTupleObject *seq;
959 PyObject *item;
960
961 assert(it != NULL);
962 seq = it->it_seq;
963 if (seq == NULL)
964 return NULL;
965 assert(PyTuple_Check(seq));
966
967 if (it->it_index < PyTuple_GET_SIZE(seq)) {
968 item = PyTuple_GET_ITEM(seq, it->it_index);
969 ++it->it_index;
970 Py_INCREF(item);
971 return item;
972 }
973
974 it->it_seq = NULL;
975 Py_DECREF(seq);
976 return NULL;
977 }
978
979 static PyObject *
980 tupleiter_len(tupleiterobject *it)
981 {
982 Py_ssize_t len = 0;
983 if (it->it_seq)
984 len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
985 return PyLong_FromSsize_t(len);
986 }
987
988 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
989
990 static PyObject *
991 tupleiter_reduce(tupleiterobject *it)
992 {
993 if (it->it_seq)
994 return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"),
995 it->it_seq, it->it_index);
996 else
997 return Py_BuildValue("N(())", _PyObject_GetBuiltin("iter"));
998 }
999
1000 static PyObject *
1001 tupleiter_setstate(tupleiterobject *it, PyObject *state)
1002 {
1003 Py_ssize_t index = PyLong_AsSsize_t(state);
1004 if (index == -1 && PyErr_Occurred())
1005 return NULL;
1006 if (it->it_seq != NULL) {
1007 if (index < 0)
1008 index = 0;
1009 else if (index > PyTuple_GET_SIZE(it->it_seq))
1010 index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
1011 it->it_index = index;
1012 }
1013 Py_RETURN_NONE;
1014 }
1015
1016 PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
1017 PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
1018
1019 static PyMethodDef tupleiter_methods[] = {
1020 {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
1021 {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
1022 {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
1023 {NULL, NULL} /* sentinel */
1024 };
1025
1026 PyTypeObject PyTupleIter_Type = {
1027 PyVarObject_HEAD_INIT(&PyType_Type, 0)
1028 "tuple_iterator", /* tp_name */
1029 sizeof(tupleiterobject), /* tp_basicsize */
1030 0, /* tp_itemsize */
1031 /* methods */
1032 (destructor)tupleiter_dealloc, /* tp_dealloc */
1033 0, /* tp_print */
1034 0, /* tp_getattr */
1035 0, /* tp_setattr */
1036 0, /* tp_reserved */
1037 0, /* tp_repr */
1038 0, /* tp_as_number */
1039 0, /* tp_as_sequence */
1040 0, /* tp_as_mapping */
1041 0, /* tp_hash */
1042 0, /* tp_call */
1043 0, /* tp_str */
1044 PyObject_GenericGetAttr, /* tp_getattro */
1045 0, /* tp_setattro */
1046 0, /* tp_as_buffer */
1047 Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
1048 0, /* tp_doc */
1049 (traverseproc)tupleiter_traverse, /* tp_traverse */
1050 0, /* tp_clear */
1051 0, /* tp_richcompare */
1052 0, /* tp_weaklistoffset */
1053 PyObject_SelfIter, /* tp_iter */
1054 (iternextfunc)tupleiter_next, /* tp_iternext */
1055 tupleiter_methods, /* tp_methods */
1056 0,
1057 };
1058
1059 static PyObject *
1060 tuple_iter(PyObject *seq)
1061 {
1062 tupleiterobject *it;
1063
1064 if (!PyTuple_Check(seq)) {
1065 PyErr_BadInternalCall();
1066 return NULL;
1067 }
1068 it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
1069 if (it == NULL)
1070 return NULL;
1071 it->it_index = 0;
1072 Py_INCREF(seq);
1073 it->it_seq = (PyTupleObject *)seq;
1074 _PyObject_GC_TRACK(it);
1075 return (PyObject *)it;
1076 }
1077