/* Tuple object implementation */

#include "Python.h"
#include "pycore_abstract.h"   // _PyIndex_Check()
#include "pycore_accu.h"
#include "pycore_gc.h"         // _PyObject_GC_IS_TRACKED()
#include "pycore_object.h"

/*[clinic input]
class tuple "PyTupleObject *" "&PyTuple_Type"
[clinic start generated code]*/
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/

#include "clinic/tupleobject.c.h"

/* Speed optimization to avoid frequent malloc/free of small tuples */
#ifndef PyTuple_MAXSAVESIZE
#define PyTuple_MAXSAVESIZE  20    /* Largest tuple to save on free list */
#endif
#ifndef PyTuple_MAXFREELIST
#define PyTuple_MAXFREELIST  2000  /* Maximum number of tuples of each size to save */
#endif

#if PyTuple_MAXSAVESIZE > 0
/* Entries 1 up to PyTuple_MAXSAVESIZE are free lists, entry 0 is the empty
   tuple () of which at most one instance will be allocated.
*/
static PyTupleObject *free_list[PyTuple_MAXSAVESIZE];
static int numfree[PyTuple_MAXSAVESIZE];
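
/* Freed tuples of size n (1 <= n < PyTuple_MAXSAVESIZE) are kept in a singly
   linked chain: free_list[n] is the head and each entry reuses ob_item[0] as
   the "next" pointer, while numfree[n] counts the entries in that chain.
   See tuple_alloc() and tupledealloc() for the two ends of this scheme. */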
#endif

static inline void
tuple_gc_track(PyTupleObject *op)
{
    _PyObject_GC_TRACK(op);
}

/* Print summary info about the state of the optimized allocator */
void
_PyTuple_DebugMallocStats(FILE *out)
{
#if PyTuple_MAXSAVESIZE > 0
    int i;
    char buf[128];
    for (i = 1; i < PyTuple_MAXSAVESIZE; i++) {
        PyOS_snprintf(buf, sizeof(buf),
                      "free %d-sized PyTupleObject", i);
        _PyDebugAllocatorStats(out,
                               buf,
                               numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i));
    }
#endif
}

/* Allocate an uninitialized tuple object. Before making it public, the
   following steps must be done:
   - initialize its items
   - call tuple_gc_track() on it
   Because the empty tuple is always reused and is already tracked by the GC,
   this function must not be called with size == 0 (unless it is called from
   PyTuple_New(), which wraps this function).
*/
static PyTupleObject *
tuple_alloc(Py_ssize_t size)
{
    PyTupleObject *op;
    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }
#if PyTuple_MAXSAVESIZE > 0
    if (size < PyTuple_MAXSAVESIZE && (op = free_list[size]) != NULL) {
        assert(size != 0);
        free_list[size] = (PyTupleObject *) op->ob_item[0];
        numfree[size]--;
        /* Inline PyObject_InitVar */
#ifdef Py_TRACE_REFS
        Py_SIZE(op) = size;
        Py_TYPE(op) = &PyTuple_Type;
#endif
        _Py_NewReference((PyObject *)op);
    }
    else
#endif
    {
        /* Check for overflow */
        if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - (sizeof(PyTupleObject) -
                    sizeof(PyObject *))) / sizeof(PyObject *)) {
            return (PyTupleObject *)PyErr_NoMemory();
        }
        op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
        if (op == NULL)
            return NULL;
    }
    return op;
}

PyObject *
PyTuple_New(Py_ssize_t size)
{
    PyTupleObject *op;
#if PyTuple_MAXSAVESIZE > 0
    if (size == 0 && free_list[0]) {
        op = free_list[0];
        Py_INCREF(op);
        return (PyObject *) op;
    }
#endif
    op = tuple_alloc(size);
    if (op == NULL) {
        return NULL;
    }
    for (Py_ssize_t i = 0; i < size; i++) {
        op->ob_item[i] = NULL;
    }
#if PyTuple_MAXSAVESIZE > 0
    if (size == 0) {
        free_list[0] = op;
        ++numfree[0];
        Py_INCREF(op);          /* extra INCREF so that this is never freed */
    }
#endif
    tuple_gc_track(op);
    return (PyObject *) op;
}
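
/* Example (illustrative only; error checks on the item constructors are
   elided): a caller of PyTuple_New() gets a tuple whose slots are all NULL
   and must fill every slot before exposing the object.

       PyObject *t = PyTuple_New(2);
       if (t == NULL)
           return NULL;
       PyTuple_SET_ITEM(t, 0, PyLong_FromLong(1));  // SET_ITEM steals the reference
       PyTuple_SET_ITEM(t, 1, PyLong_FromLong(2));
*/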

Py_ssize_t
PyTuple_Size(PyObject *op)
{
    if (!PyTuple_Check(op)) {
        PyErr_BadInternalCall();
        return -1;
    }
    else
        return Py_SIZE(op);
}

PyObject *
PyTuple_GetItem(PyObject *op, Py_ssize_t i)
{
    if (!PyTuple_Check(op)) {
        PyErr_BadInternalCall();
        return NULL;
    }
    if (i < 0 || i >= Py_SIZE(op)) {
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
        return NULL;
    }
    return ((PyTupleObject *)op) -> ob_item[i];
}

int
PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
{
    PyObject **p;
    if (!PyTuple_Check(op) || Py_REFCNT(op) != 1) {
        Py_XDECREF(newitem);
        PyErr_BadInternalCall();
        return -1;
    }
    if (i < 0 || i >= Py_SIZE(op)) {
        Py_XDECREF(newitem);
        PyErr_SetString(PyExc_IndexError,
                        "tuple assignment index out of range");
        return -1;
    }
    p = ((PyTupleObject *)op) -> ob_item + i;
    Py_XSETREF(*p, newitem);
    return 0;
}
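
/* Note for callers: PyTuple_SetItem() steals the reference to newitem (it is
   released even when the call fails) and, as enforced above, may only be used
   on a tuple holding a single reference -- i.e. one that was just created and
   has not been shared yet. */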

void
_PyTuple_MaybeUntrack(PyObject *op)
{
    PyTupleObject *t;
    Py_ssize_t i, n;

    if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
        return;
    t = (PyTupleObject *) op;
    n = Py_SIZE(t);
    for (i = 0; i < n; i++) {
        PyObject *elt = PyTuple_GET_ITEM(t, i);
        /* Tuples with NULL elements aren't fully constructed;
           don't untrack them yet. */
        if (!elt ||
            _PyObject_GC_MAY_BE_TRACKED(elt))
            return;
    }
    _PyObject_GC_UNTRACK(op);
}

PyObject *
PyTuple_Pack(Py_ssize_t n, ...)
{
    Py_ssize_t i;
    PyObject *o;
    PyObject **items;
    va_list vargs;

    if (n == 0) {
        return PyTuple_New(0);
    }

    va_start(vargs, n);
    PyTupleObject *result = tuple_alloc(n);
    if (result == NULL) {
        va_end(vargs);
        return NULL;
    }
    items = result->ob_item;
    for (i = 0; i < n; i++) {
        o = va_arg(vargs, PyObject *);
        Py_INCREF(o);
        items[i] = o;
    }
    va_end(vargs);
    tuple_gc_track(result);
    return (PyObject *)result;
}
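
/* Example (illustrative only): PyTuple_Pack() takes new references to each
   argument, so

       PyObject *pair = PyTuple_Pack(2, x, y);

   builds the tuple (x, y) and is equivalent to Py_BuildValue("(OO)", x, y). */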


/* Methods */

static void
tupledealloc(PyTupleObject *op)
{
    Py_ssize_t i;
    Py_ssize_t len = Py_SIZE(op);
    PyObject_GC_UnTrack(op);
    Py_TRASHCAN_BEGIN(op, tupledealloc)
    if (len > 0) {
        i = len;
        while (--i >= 0)
            Py_XDECREF(op->ob_item[i]);
#if PyTuple_MAXSAVESIZE > 0
        if (len < PyTuple_MAXSAVESIZE &&
            numfree[len] < PyTuple_MAXFREELIST &&
            Py_IS_TYPE(op, &PyTuple_Type))
        {
            op->ob_item[0] = (PyObject *) free_list[len];
            numfree[len]++;
            free_list[len] = op;
            goto done; /* return */
        }
#endif
    }
    Py_TYPE(op)->tp_free((PyObject *)op);
#if PyTuple_MAXSAVESIZE > 0
done:
#endif
    Py_TRASHCAN_END
}

static PyObject *
tuplerepr(PyTupleObject *v)
{
    Py_ssize_t i, n;
    _PyUnicodeWriter writer;

    n = Py_SIZE(v);
    if (n == 0)
        return PyUnicode_FromString("()");
    /* Although tuples are not mutable, it is still possible to end up with
       a cycle in a tuple through an object that stores itself within a
       tuple (and thus infinitely asks for the repr of itself). This should
       only be possible within a type. */
    i = Py_ReprEnter((PyObject *)v);
    if (i != 0) {
        return i > 0 ? PyUnicode_FromString("(...)") : NULL;
    }

    _PyUnicodeWriter_Init(&writer);
    writer.overallocate = 1;
    if (Py_SIZE(v) > 1) {
        /* "(" + "1" + ", 2" * (len - 1) + ")" */
        writer.min_length = 1 + 1 + (2 + 1) * (Py_SIZE(v) - 1) + 1;
    }
    else {
        /* "(1,)" */
        writer.min_length = 4;
    }

    if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0)
        goto error;

    /* Do repr() on each element. */
    for (i = 0; i < n; ++i) {
        PyObject *s;

        if (i > 0) {
            if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0)
                goto error;
        }

        s = PyObject_Repr(v->ob_item[i]);
        if (s == NULL)
            goto error;

        if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
            Py_DECREF(s);
            goto error;
        }
        Py_DECREF(s);
    }

    writer.overallocate = 0;
    if (n > 1) {
        if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0)
            goto error;
    }
    else {
        if (_PyUnicodeWriter_WriteASCIIString(&writer, ",)", 2) < 0)
            goto error;
    }

    Py_ReprLeave((PyObject *)v);
    return _PyUnicodeWriter_Finish(&writer);

error:
    _PyUnicodeWriter_Dealloc(&writer);
    Py_ReprLeave((PyObject *)v);
    return NULL;
}

/* Hash for tuples. This is a slightly simplified version of the xxHash
   non-cryptographic hash:
   - we do not use any parallelism, there is only 1 accumulator.
   - we drop the final mixing since this is just a permutation of the
     output space: it does not help against collisions.
   - at the end, we mangle the length with a single constant.
   For the xxHash specification, see
   https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md

   Below are the official constants from the xxHash specification. Optimizing
   compilers should emit a single "rotate" instruction for the
   _PyHASH_XXROTATE() expansion. If that doesn't happen for some important
   platform, the macro could be changed to expand to a platform-specific rotate
   spelling instead.
*/
#if SIZEOF_PY_UHASH_T > 4
#define _PyHASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL)
#define _PyHASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL)
#define _PyHASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL)
#define _PyHASH_XXROTATE(x) ((x << 31) | (x >> 33))  /* Rotate left 31 bits */
#else
#define _PyHASH_XXPRIME_1 ((Py_uhash_t)2654435761UL)
#define _PyHASH_XXPRIME_2 ((Py_uhash_t)2246822519UL)
#define _PyHASH_XXPRIME_5 ((Py_uhash_t)374761393UL)
#define _PyHASH_XXROTATE(x) ((x << 13) | (x >> 19))  /* Rotate left 13 bits */
#endif
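
/* For each item, the loop in tuplehash() below computes

       lane = hash(item)
       acc  = _PyHASH_XXROTATE(acc + lane * _PyHASH_XXPRIME_2) * _PyHASH_XXPRIME_1

   starting from acc = _PyHASH_XXPRIME_5, and finally mixes in the length with
   acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL). */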

/* Tests have shown that it's not worth it to cache the hash value, see
   https://bugs.python.org/issue9685 */
static Py_hash_t
tuplehash(PyTupleObject *v)
{
    Py_ssize_t i, len = Py_SIZE(v);
    PyObject **item = v->ob_item;

    Py_uhash_t acc = _PyHASH_XXPRIME_5;
    for (i = 0; i < len; i++) {
        Py_uhash_t lane = PyObject_Hash(item[i]);
        if (lane == (Py_uhash_t)-1) {
            return -1;
        }
        acc += lane * _PyHASH_XXPRIME_2;
        acc = _PyHASH_XXROTATE(acc);
        acc *= _PyHASH_XXPRIME_1;
    }

    /* Add input length, mangled to keep the historical value of hash(()). */
    acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL);

    if (acc == (Py_uhash_t)-1) {
        return 1546275796;
    }
    return acc;
}

static Py_ssize_t
tuplelength(PyTupleObject *a)
{
    return Py_SIZE(a);
}

static int
tuplecontains(PyTupleObject *a, PyObject *el)
{
    Py_ssize_t i;
    int cmp;

    for (i = 0, cmp = 0 ; cmp == 0 && i < Py_SIZE(a); ++i)
        cmp = PyObject_RichCompareBool(PyTuple_GET_ITEM(a, i), el, Py_EQ);
    return cmp;
}

static PyObject *
tupleitem(PyTupleObject *a, Py_ssize_t i)
{
    if (i < 0 || i >= Py_SIZE(a)) {
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
        return NULL;
    }
    Py_INCREF(a->ob_item[i]);
    return a->ob_item[i];
}

PyObject *
_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n)
{
    if (n == 0) {
        return PyTuple_New(0);
    }

    PyTupleObject *tuple = tuple_alloc(n);
    if (tuple == NULL) {
        return NULL;
    }
    PyObject **dst = tuple->ob_item;
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *item = src[i];
        Py_INCREF(item);
        dst[i] = item;
    }
    tuple_gc_track(tuple);
    return (PyObject *)tuple;
}
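
/* _PyTuple_FromArray() leaves the source array untouched and takes a new
   reference to every copied item; within this file it is the helper behind
   tupleslice() below. */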

static PyObject *
tupleslice(PyTupleObject *a, Py_ssize_t ilow,
           Py_ssize_t ihigh)
{
    if (ilow < 0)
        ilow = 0;
    if (ihigh > Py_SIZE(a))
        ihigh = Py_SIZE(a);
    if (ihigh < ilow)
        ihigh = ilow;
    if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
        Py_INCREF(a);
        return (PyObject *)a;
    }
    return _PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow);
}

PyObject *
PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
{
    if (op == NULL || !PyTuple_Check(op)) {
        PyErr_BadInternalCall();
        return NULL;
    }
    return tupleslice((PyTupleObject *)op, i, j);
}

static PyObject *
tupleconcat(PyTupleObject *a, PyObject *bb)
{
    Py_ssize_t size;
    Py_ssize_t i;
    PyObject **src, **dest;
    PyTupleObject *np;
    if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
        Py_INCREF(bb);
        return bb;
    }
    if (!PyTuple_Check(bb)) {
        PyErr_Format(PyExc_TypeError,
                     "can only concatenate tuple (not \"%.200s\") to tuple",
                     Py_TYPE(bb)->tp_name);
        return NULL;
    }
#define b ((PyTupleObject *)bb)
    if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
        Py_INCREF(a);
        return (PyObject *)a;
    }
    if (Py_SIZE(a) > PY_SSIZE_T_MAX - Py_SIZE(b))
        return PyErr_NoMemory();
    size = Py_SIZE(a) + Py_SIZE(b);
    if (size == 0) {
        return PyTuple_New(0);
    }

    np = tuple_alloc(size);
    if (np == NULL) {
        return NULL;
    }
    src = a->ob_item;
    dest = np->ob_item;
    for (i = 0; i < Py_SIZE(a); i++) {
        PyObject *v = src[i];
        Py_INCREF(v);
        dest[i] = v;
    }
    src = b->ob_item;
    dest = np->ob_item + Py_SIZE(a);
    for (i = 0; i < Py_SIZE(b); i++) {
        PyObject *v = src[i];
        Py_INCREF(v);
        dest[i] = v;
    }
    tuple_gc_track(np);
    return (PyObject *)np;
#undef b
}

static PyObject *
tuplerepeat(PyTupleObject *a, Py_ssize_t n)
{
    Py_ssize_t i, j;
    Py_ssize_t size;
    PyTupleObject *np;
    PyObject **p, **items;
    if (Py_SIZE(a) == 0 || n == 1) {
        if (PyTuple_CheckExact(a)) {
            /* Since tuples are immutable, we can return a shared
               copy in this case */
            Py_INCREF(a);
            return (PyObject *)a;
        }
    }
    if (Py_SIZE(a) == 0 || n <= 0) {
        return PyTuple_New(0);
    }
    if (n > PY_SSIZE_T_MAX / Py_SIZE(a))
        return PyErr_NoMemory();
    size = Py_SIZE(a) * n;
    np = tuple_alloc(size);
    if (np == NULL)
        return NULL;
    p = np->ob_item;
    items = a->ob_item;
    for (i = 0; i < n; i++) {
        for (j = 0; j < Py_SIZE(a); j++) {
            *p = items[j];
            Py_INCREF(*p);
            p++;
        }
    }
    tuple_gc_track(np);
    return (PyObject *) np;
}

/*[clinic input]
tuple.index

    value: object
    start: slice_index(accept={int}) = 0
    stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
    /

Return first index of value.

Raises ValueError if the value is not present.
[clinic start generated code]*/

static PyObject *
tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
                 Py_ssize_t stop)
/*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
{
    Py_ssize_t i;

    if (start < 0) {
        start += Py_SIZE(self);
        if (start < 0)
            start = 0;
    }
    if (stop < 0) {
        stop += Py_SIZE(self);
    }
    else if (stop > Py_SIZE(self)) {
        stop = Py_SIZE(self);
    }
    for (i = start; i < stop; i++) {
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
        if (cmp > 0)
            return PyLong_FromSsize_t(i);
        else if (cmp < 0)
            return NULL;
    }
    PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
    return NULL;
}

/*[clinic input]
tuple.count

    value: object
    /

Return number of occurrences of value.
[clinic start generated code]*/

static PyObject *
tuple_count(PyTupleObject *self, PyObject *value)
/*[clinic end generated code: output=aa927affc5a97605 input=531721aff65bd772]*/
{
    Py_ssize_t count = 0;
    Py_ssize_t i;

    for (i = 0; i < Py_SIZE(self); i++) {
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
        if (cmp > 0)
            count++;
        else if (cmp < 0)
            return NULL;
    }
    return PyLong_FromSsize_t(count);
}

static int
tupletraverse(PyTupleObject *o, visitproc visit, void *arg)
{
    Py_ssize_t i;

    for (i = Py_SIZE(o); --i >= 0; )
        Py_VISIT(o->ob_item[i]);
    return 0;
}

static PyObject *
tuplerichcompare(PyObject *v, PyObject *w, int op)
{
    PyTupleObject *vt, *wt;
    Py_ssize_t i;
    Py_ssize_t vlen, wlen;

    if (!PyTuple_Check(v) || !PyTuple_Check(w))
        Py_RETURN_NOTIMPLEMENTED;

    vt = (PyTupleObject *)v;
    wt = (PyTupleObject *)w;

    vlen = Py_SIZE(vt);
    wlen = Py_SIZE(wt);

    /* Note: the corresponding code for lists has an "early out" test
     * here when op is EQ or NE and the lengths differ.  That pays there,
     * but Tim was unable to find any real code where EQ/NE tuple
     * compares don't have the same length, so testing for it here would
     * have cost without benefit.
     */

    /* Search for the first index where items are different.
     * Note that because tuples are immutable, it's safe to reuse
     * vlen and wlen across the comparison calls.
     */
    for (i = 0; i < vlen && i < wlen; i++) {
        int k = PyObject_RichCompareBool(vt->ob_item[i],
                                         wt->ob_item[i], Py_EQ);
        if (k < 0)
            return NULL;
        if (!k)
            break;
    }

    if (i >= vlen || i >= wlen) {
        /* No more items to compare -- compare sizes */
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
    }

    /* We have an item that differs -- shortcuts for EQ/NE */
    if (op == Py_EQ) {
        Py_RETURN_FALSE;
    }
    if (op == Py_NE) {
        Py_RETURN_TRUE;
    }

    /* Compare the final item again using the proper operator */
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
}
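
/* In Python terms the comparison above is lexicographic: items are compared
   pairwise until the first mismatch and only then are the lengths compared,
   so (1, 2) < (1, 2, 3) and (1, 3) > (1, 2, 99). */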

static PyObject *
tuple_subtype_new(PyTypeObject *type, PyObject *iterable);

/*[clinic input]
@classmethod
tuple.__new__ as tuple_new
    iterable: object(c_default="NULL") = ()
    /

Built-in immutable sequence.

If no argument is given, the constructor returns an empty tuple.
If iterable is specified the tuple is initialized from iterable's items.

If the argument is a tuple, the return value is the same object.
[clinic start generated code]*/

static PyObject *
tuple_new_impl(PyTypeObject *type, PyObject *iterable)
/*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
{
    if (type != &PyTuple_Type)
        return tuple_subtype_new(type, iterable);

    if (iterable == NULL)
        return PyTuple_New(0);
    else
        return PySequence_Tuple(iterable);
}

static PyObject *
tuple_vectorcall(PyObject *type, PyObject * const*args,
                 size_t nargsf, PyObject *kwnames)
{
    if (!_PyArg_NoKwnames("tuple", kwnames)) {
        return NULL;
    }

    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
    if (!_PyArg_CheckPositional("tuple", nargs, 0, 1)) {
        return NULL;
    }

    if (nargs) {
        return tuple_new_impl((PyTypeObject *)type, args[0]);
    }
    return PyTuple_New(0);
}

static PyObject *
tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
{
    PyObject *tmp, *newobj, *item;
    Py_ssize_t i, n;

    assert(PyType_IsSubtype(type, &PyTuple_Type));
    tmp = tuple_new_impl(&PyTuple_Type, iterable);
    if (tmp == NULL)
        return NULL;
    assert(PyTuple_Check(tmp));
    newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
    if (newobj == NULL) {
        Py_DECREF(tmp);
        return NULL;
    }
    for (i = 0; i < n; i++) {
        item = PyTuple_GET_ITEM(tmp, i);
        Py_INCREF(item);
        PyTuple_SET_ITEM(newobj, i, item);
    }
    Py_DECREF(tmp);
    return newobj;
}

static PySequenceMethods tuple_as_sequence = {
    (lenfunc)tuplelength,                       /* sq_length */
    (binaryfunc)tupleconcat,                    /* sq_concat */
    (ssizeargfunc)tuplerepeat,                  /* sq_repeat */
    (ssizeargfunc)tupleitem,                    /* sq_item */
    0,                                          /* sq_slice */
    0,                                          /* sq_ass_item */
    0,                                          /* sq_ass_slice */
    (objobjproc)tuplecontains,                  /* sq_contains */
};

static PyObject*
tuplesubscript(PyTupleObject* self, PyObject* item)
{
    if (_PyIndex_Check(item)) {
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
        if (i == -1 && PyErr_Occurred())
            return NULL;
        if (i < 0)
            i += PyTuple_GET_SIZE(self);
        return tupleitem(self, i);
    }
    else if (PySlice_Check(item)) {
        Py_ssize_t start, stop, step, slicelength, i;
        size_t cur;
        PyObject* it;
        PyObject **src, **dest;

        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
            return NULL;
        }
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
                                            &stop, step);

        if (slicelength <= 0) {
            return PyTuple_New(0);
        }
        else if (start == 0 && step == 1 &&
                 slicelength == PyTuple_GET_SIZE(self) &&
                 PyTuple_CheckExact(self)) {
            Py_INCREF(self);
            return (PyObject *)self;
        }
        else {
            PyTupleObject* result = tuple_alloc(slicelength);
            if (!result) return NULL;

            src = self->ob_item;
            dest = result->ob_item;
            for (cur = start, i = 0; i < slicelength;
                 cur += step, i++) {
                it = src[cur];
                Py_INCREF(it);
                dest[i] = it;
            }

            tuple_gc_track(result);
            return (PyObject *)result;
        }
    }
    else {
        PyErr_Format(PyExc_TypeError,
                     "tuple indices must be integers or slices, not %.200s",
                     Py_TYPE(item)->tp_name);
        return NULL;
    }
}

/*[clinic input]
tuple.__getnewargs__
[clinic start generated code]*/

static PyObject *
tuple___getnewargs___impl(PyTupleObject *self)
/*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
{
    return Py_BuildValue("(N)", tupleslice(self, 0, Py_SIZE(self)));
}

static PyMethodDef tuple_methods[] = {
    TUPLE___GETNEWARGS___METHODDEF
    TUPLE_INDEX_METHODDEF
    TUPLE_COUNT_METHODDEF
    {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
    {NULL, NULL}                                /* sentinel */
};

static PyMappingMethods tuple_as_mapping = {
    (lenfunc)tuplelength,
    (binaryfunc)tuplesubscript,
    0
};

static PyObject *tuple_iter(PyObject *seq);

PyTypeObject PyTuple_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple",
    sizeof(PyTupleObject) - sizeof(PyObject *),
    sizeof(PyObject *),
    (destructor)tupledealloc,                   /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    (reprfunc)tuplerepr,                        /* tp_repr */
    0,                                          /* tp_as_number */
    &tuple_as_sequence,                         /* tp_as_sequence */
    &tuple_as_mapping,                          /* tp_as_mapping */
    (hashfunc)tuplehash,                        /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS, /* tp_flags */
    tuple_new__doc__,                           /* tp_doc */
    (traverseproc)tupletraverse,                /* tp_traverse */
    0,                                          /* tp_clear */
    tuplerichcompare,                           /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    tuple_iter,                                 /* tp_iter */
    0,                                          /* tp_iternext */
    tuple_methods,                              /* tp_methods */
    0,                                          /* tp_members */
    0,                                          /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    0,                                          /* tp_init */
    0,                                          /* tp_alloc */
    tuple_new,                                  /* tp_new */
    PyObject_GC_Del,                            /* tp_free */
    .tp_vectorcall = tuple_vectorcall,
};

/* The following function breaks the notion that tuples are immutable:
   it changes the size of a tuple.  We get away with this only if there
   is only one module referencing the object.  You can also think of it
   as creating a new tuple object and destroying the old one, only more
   efficiently.  In any case, don't use this if the tuple may already be
   known to some other part of the code. */
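
/* Typical call pattern (illustrative only): the caller passes the address of
   its only reference, which may be replaced or cleared by the call:

       if (_PyTuple_Resize(&v, newsize) < 0)
           return NULL;  // on failure, v has been released and set to NULL
*/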

int
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
{
    PyTupleObject *v;
    PyTupleObject *sv;
    Py_ssize_t i;
    Py_ssize_t oldsize;

    v = (PyTupleObject *) *pv;
    if (v == NULL || !Py_IS_TYPE(v, &PyTuple_Type) ||
        (Py_SIZE(v) != 0 && Py_REFCNT(v) != 1)) {
        *pv = 0;
        Py_XDECREF(v);
        PyErr_BadInternalCall();
        return -1;
    }
    oldsize = Py_SIZE(v);
    if (oldsize == newsize)
        return 0;

    if (oldsize == 0) {
        /* Empty tuples are often shared, so we should never
           resize them in-place even if we do own the only
           (current) reference */
        Py_DECREF(v);
        *pv = PyTuple_New(newsize);
        return *pv == NULL ? -1 : 0;
    }

    /* XXX UNREF/NEWREF interface should be more symmetrical */
#ifdef Py_REF_DEBUG
    _Py_RefTotal--;
#endif
    if (_PyObject_GC_IS_TRACKED(v)) {
        _PyObject_GC_UNTRACK(v);
    }
#ifdef Py_TRACE_REFS
    _Py_ForgetReference((PyObject *) v);
#endif
    /* DECREF items deleted by shrinkage */
    for (i = newsize; i < oldsize; i++) {
        Py_CLEAR(v->ob_item[i]);
    }
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
    if (sv == NULL) {
        *pv = NULL;
        PyObject_GC_Del(v);
        return -1;
    }
    _Py_NewReference((PyObject *) sv);
    /* Zero out items added by growing */
    if (newsize > oldsize)
        memset(&sv->ob_item[oldsize], 0,
               sizeof(*sv->ob_item) * (newsize - oldsize));
    *pv = (PyObject *) sv;
    _PyObject_GC_TRACK(sv);
    return 0;
}

void
_PyTuple_ClearFreeList(void)
{
#if PyTuple_MAXSAVESIZE > 0
    for (Py_ssize_t i = 1; i < PyTuple_MAXSAVESIZE; i++) {
        PyTupleObject *p = free_list[i];
        free_list[i] = NULL;
        numfree[i] = 0;
        while (p) {
            PyTupleObject *q = p;
            p = (PyTupleObject *)(p->ob_item[0]);
            PyObject_GC_Del(q);
        }
    }
    // the empty tuple singleton is only cleared by _PyTuple_Fini()
#endif
}

void
_PyTuple_Fini(void)
{
#if PyTuple_MAXSAVESIZE > 0
    /* empty tuples are used all over the place and applications may
     * rely on the fact that an empty tuple is a singleton. */
    Py_CLEAR(free_list[0]);

    _PyTuple_ClearFreeList();
#endif
}

/*********************** Tuple Iterator **************************/

typedef struct {
    PyObject_HEAD
    Py_ssize_t it_index;
    PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
} tupleiterobject;

static void
tupleiter_dealloc(tupleiterobject *it)
{
    _PyObject_GC_UNTRACK(it);
    Py_XDECREF(it->it_seq);
    PyObject_GC_Del(it);
}

static int
tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg)
{
    Py_VISIT(it->it_seq);
    return 0;
}

static PyObject *
tupleiter_next(tupleiterobject *it)
{
    PyTupleObject *seq;
    PyObject *item;

    assert(it != NULL);
    seq = it->it_seq;
    if (seq == NULL)
        return NULL;
    assert(PyTuple_Check(seq));

    if (it->it_index < PyTuple_GET_SIZE(seq)) {
        item = PyTuple_GET_ITEM(seq, it->it_index);
        ++it->it_index;
        Py_INCREF(item);
        return item;
    }

    it->it_seq = NULL;
    Py_DECREF(seq);
    return NULL;
}

static PyObject *
tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
{
    Py_ssize_t len = 0;
    if (it->it_seq)
        len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
    return PyLong_FromSsize_t(len);
}

PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");

static PyObject *
tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
{
    _Py_IDENTIFIER(iter);
    if (it->it_seq)
        return Py_BuildValue("N(O)n", _PyEval_GetBuiltinId(&PyId_iter),
                             it->it_seq, it->it_index);
    else
        return Py_BuildValue("N(())", _PyEval_GetBuiltinId(&PyId_iter));
}

static PyObject *
tupleiter_setstate(tupleiterobject *it, PyObject *state)
{
    Py_ssize_t index = PyLong_AsSsize_t(state);
    if (index == -1 && PyErr_Occurred())
        return NULL;
    if (it->it_seq != NULL) {
        if (index < 0)
            index = 0;
        else if (index > PyTuple_GET_SIZE(it->it_seq))
            index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
        it->it_index = index;
    }
    Py_RETURN_NONE;
}

PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");

static PyMethodDef tupleiter_methods[] = {
    {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
    {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
    {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
    {NULL, NULL}                                /* sentinel */
};

PyTypeObject PyTupleIter_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple_iterator",                           /* tp_name */
    sizeof(tupleiterobject),                    /* tp_basicsize */
    0,                                          /* tp_itemsize */
    /* methods */
    (destructor)tupleiter_dealloc,              /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    0,                                          /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    /* tp_flags */
    0,                                          /* tp_doc */
    (traverseproc)tupleiter_traverse,           /* tp_traverse */
    0,                                          /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    PyObject_SelfIter,                          /* tp_iter */
    (iternextfunc)tupleiter_next,               /* tp_iternext */
    tupleiter_methods,                          /* tp_methods */
    0,
};

static PyObject *
tuple_iter(PyObject *seq)
{
    tupleiterobject *it;

    if (!PyTuple_Check(seq)) {
        PyErr_BadInternalCall();
        return NULL;
    }
    it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
    if (it == NULL)
        return NULL;
    it->it_index = 0;
    Py_INCREF(seq);
    it->it_seq = (PyTupleObject *)seq;
    _PyObject_GC_TRACK(it);
    return (PyObject *)it;
}