• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 
2 /* Tuple object implementation */
3 
4 #include "Python.h"
5 #include "pycore_abstract.h"      // _PyIndex_Check()
6 #include "pycore_ceval.h"         // _PyEval_GetBuiltin()
7 #include "pycore_gc.h"            // _PyObject_GC_IS_TRACKED()
8 #include "pycore_initconfig.h"    // _PyStatus_OK()
9 #include "pycore_modsupport.h"    // _PyArg_NoKwnames()
10 #include "pycore_object.h"        // _PyObject_GC_TRACK(), _Py_FatalRefcountError(), _PyDebugAllocatorStats()
11 
12 /*[clinic input]
13 class tuple "PyTupleObject *" "&PyTuple_Type"
14 [clinic start generated code]*/
15 /*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/
16 
17 #include "clinic/tupleobject.c.h"
18 
19 
20 static inline PyTupleObject * maybe_freelist_pop(Py_ssize_t);
21 static inline int maybe_freelist_push(PyTupleObject *);
22 
23 
/* Allocate an uninitialized tuple object. Before making it public, following
   steps must be done:

   - Initialize its items.
   - Call _PyObject_GC_TRACK() on it.

   Because the empty tuple is always reused and it's already tracked by GC,
   this function must not be called with size == 0 (unless from PyTuple_New()
   which wraps this function).
*/
static PyTupleObject *
tuple_alloc(Py_ssize_t size)
{
    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }
#ifdef Py_DEBUG
    assert(size != 0);    // The empty tuple is statically allocated.
#endif

    /* Try the per-size freelist first; fall back to a fresh GC allocation. */
    PyTupleObject *op = maybe_freelist_pop(size);
    if (op == NULL) {
        /* Check for overflow: the total object size must stay within
           PY_SSIZE_T_MAX.  sizeof(PyTupleObject) already accounts for one
           item slot, hence the sizeof(PyObject *) subtraction. */
        if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - (sizeof(PyTupleObject) -
                    sizeof(PyObject *))) / sizeof(PyObject *)) {
            return (PyTupleObject *)PyErr_NoMemory();
        }
        op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
        if (op == NULL)
            return NULL;
    }
    return op;
}
58 
// The empty tuple singleton is not tracked by the GC.
// It does not contain any Python object.
// Note that tuple subclasses have their own empty instances.

/* Return the shared empty-tuple singleton (borrowed from the static
   singleton table; callers treat it as a normal new reference via the
   usual immortality/refcount rules of the singleton). */
static inline PyObject *
tuple_get_empty(void)
{
    return (PyObject *)&_Py_SINGLETON(tuple_empty);
}
68 
69 PyObject *
PyTuple_New(Py_ssize_t size)70 PyTuple_New(Py_ssize_t size)
71 {
72     PyTupleObject *op;
73     if (size == 0) {
74         return tuple_get_empty();
75     }
76     op = tuple_alloc(size);
77     if (op == NULL) {
78         return NULL;
79     }
80     for (Py_ssize_t i = 0; i < size; i++) {
81         op->ob_item[i] = NULL;
82     }
83     _PyObject_GC_TRACK(op);
84     return (PyObject *) op;
85 }
86 
87 Py_ssize_t
PyTuple_Size(PyObject * op)88 PyTuple_Size(PyObject *op)
89 {
90     if (!PyTuple_Check(op)) {
91         PyErr_BadInternalCall();
92         return -1;
93     }
94     else
95         return Py_SIZE(op);
96 }
97 
98 PyObject *
PyTuple_GetItem(PyObject * op,Py_ssize_t i)99 PyTuple_GetItem(PyObject *op, Py_ssize_t i)
100 {
101     if (!PyTuple_Check(op)) {
102         PyErr_BadInternalCall();
103         return NULL;
104     }
105     if (i < 0 || i >= Py_SIZE(op)) {
106         PyErr_SetString(PyExc_IndexError, "tuple index out of range");
107         return NULL;
108     }
109     return ((PyTupleObject *)op) -> ob_item[i];
110 }
111 
/* Public mutator used only during construction.  This function *steals*
   a reference to 'newitem': the reference is consumed on every path,
   including the error paths.  It requires a genuine tuple whose refcount
   is exactly 1, i.e. one not yet shared with other code. */
int
PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
{
    PyObject **p;
    if (!PyTuple_Check(op) || Py_REFCNT(op) != 1) {
        Py_XDECREF(newitem);
        PyErr_BadInternalCall();
        return -1;
    }
    if (i < 0 || i >= Py_SIZE(op)) {
        Py_XDECREF(newitem);
        PyErr_SetString(PyExc_IndexError,
                        "tuple assignment index out of range");
        return -1;
    }
    p = ((PyTupleObject *)op) -> ob_item + i;
    /* Replace any existing item, releasing its reference. */
    Py_XSETREF(*p, newitem);
    return 0;
}
131 
/* Untrack an exact tuple from the GC if none of its items may be
   tracked themselves: such a tuple can never participate in a
   reference cycle, so tracking it is pure overhead. */
void
_PyTuple_MaybeUntrack(PyObject *op)
{
    PyTupleObject *t;
    Py_ssize_t i, n;

    if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
        return;
    t = (PyTupleObject *) op;
    n = Py_SIZE(t);
    for (i = 0; i < n; i++) {
        PyObject *elt = PyTuple_GET_ITEM(t, i);
        /* Tuple with NULL elements aren't
           fully constructed, don't untrack
           them yet. */
        if (!elt ||
            _PyObject_GC_MAY_BE_TRACKED(elt))
            return;
    }
    _PyObject_GC_UNTRACK(op);
}
153 
/* Build an n-tuple from n PyObject* variadic arguments, taking a new
   reference to each.  Returns NULL on allocation failure. */
PyObject *
PyTuple_Pack(Py_ssize_t n, ...)
{
    Py_ssize_t i;
    PyObject *o;
    PyObject **items;
    va_list vargs;

    if (n == 0) {
        return tuple_get_empty();
    }

    va_start(vargs, n);
    PyTupleObject *result = tuple_alloc(n);
    if (result == NULL) {
        va_end(vargs);  /* balance va_start() on the error path */
        return NULL;
    }
    items = result->ob_item;
    for (i = 0; i < n; i++) {
        o = va_arg(vargs, PyObject *);
        items[i] = Py_NewRef(o);
    }
    va_end(vargs);
    _PyObject_GC_TRACK(result);
    return (PyObject *)result;
}
181 
182 
183 /* Methods */
184 
/* tp_dealloc: release every item and free the object (or push it onto
   the freelist).  The statically allocated empty singleton and the
   empty instances of subclasses get special handling. */
static void
tupledealloc(PyTupleObject *op)
{
    if (Py_SIZE(op) == 0) {
        /* The empty tuple is statically allocated. */
        if (op == &_Py_SINGLETON(tuple_empty)) {
#ifdef Py_DEBUG
            _Py_FatalRefcountError("deallocating the empty tuple singleton");
#else
            return;
#endif
        }
#ifdef Py_DEBUG
        /* tuple subclasses have their own empty instances. */
        assert(!PyTuple_CheckExact(op));
#endif
    }

    PyObject_GC_UnTrack(op);
    /* Trashcan protects against C stack overflow on deeply nested
       containers. */
    Py_TRASHCAN_BEGIN(op, tupledealloc)

    Py_ssize_t i = Py_SIZE(op);
    while (--i >= 0) {
        Py_XDECREF(op->ob_item[i]);
    }
    // This will abort on the empty singleton (if there is one).
    if (!maybe_freelist_push(op)) {
        Py_TYPE(op)->tp_free((PyObject *)op);
    }

    Py_TRASHCAN_END
}
217 
/* tp_repr: build the "(a, b, c)" / "(a,)" / "()" representation using
   the unicode writer, with cycle detection via Py_ReprEnter(). */
static PyObject *
tuplerepr(PyTupleObject *v)
{
    Py_ssize_t i, n;
    _PyUnicodeWriter writer;

    n = Py_SIZE(v);
    if (n == 0)
        return PyUnicode_FromString("()");

    /* While not mutable, it is still possible to end up with a cycle in a
       tuple through an object that stores itself within a tuple (and thus
       infinitely asks for the repr of itself). This should only be
       possible within a type. */
    i = Py_ReprEnter((PyObject *)v);
    if (i != 0) {
        return i > 0 ? PyUnicode_FromString("(...)") : NULL;
    }

    _PyUnicodeWriter_Init(&writer);
    writer.overallocate = 1;
    if (Py_SIZE(v) > 1) {
        /* "(" + "1" + ", 2" * (len - 1) + ")" */
        writer.min_length = 1 + 1 + (2 + 1) * (Py_SIZE(v) - 1) + 1;
    }
    else {
        /* "(1,)" */
        writer.min_length = 4;
    }

    if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0)
        goto error;

    /* Do repr() on each element. */
    for (i = 0; i < n; ++i) {
        PyObject *s;

        if (i > 0) {
            if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0)
                goto error;
        }

        s = PyObject_Repr(v->ob_item[i]);
        if (s == NULL)
            goto error;

        if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
            Py_DECREF(s);
            goto error;
        }
        Py_DECREF(s);
    }

    /* We know the exact remaining length now; stop overallocating. */
    writer.overallocate = 0;
    if (n > 1) {
        if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0)
            goto error;
    }
    else {
        /* One-element tuples need the trailing comma: "(1,)". */
        if (_PyUnicodeWriter_WriteASCIIString(&writer, ",)", 2) < 0)
            goto error;
    }

    Py_ReprLeave((PyObject *)v);
    return _PyUnicodeWriter_Finish(&writer);

error:
    _PyUnicodeWriter_Dealloc(&writer);
    Py_ReprLeave((PyObject *)v);
    return NULL;
}
289 
290 
291 /* Hash for tuples. This is a slightly simplified version of the xxHash
292    non-cryptographic hash:
293    - we do not use any parallelism, there is only 1 accumulator.
294    - we drop the final mixing since this is just a permutation of the
295      output space: it does not help against collisions.
296    - at the end, we mangle the length with a single constant.
297    For the xxHash specification, see
298    https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md
299 
300    Below are the official constants from the xxHash specification. Optimizing
301    compilers should emit a single "rotate" instruction for the
302    _PyHASH_XXROTATE() expansion. If that doesn't happen for some important
303    platform, the macro could be changed to expand to a platform-specific rotate
304    spelling instead.
305 */
306 #if SIZEOF_PY_UHASH_T > 4
307 #define _PyHASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL)
308 #define _PyHASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL)
309 #define _PyHASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL)
310 #define _PyHASH_XXROTATE(x) ((x << 31) | (x >> 33))  /* Rotate left 31 bits */
311 #else
312 #define _PyHASH_XXPRIME_1 ((Py_uhash_t)2654435761UL)
313 #define _PyHASH_XXPRIME_2 ((Py_uhash_t)2246822519UL)
314 #define _PyHASH_XXPRIME_5 ((Py_uhash_t)374761393UL)
315 #define _PyHASH_XXROTATE(x) ((x << 13) | (x >> 19))  /* Rotate left 13 bits */
316 #endif
317 
/* Tests have shown that it's not worth to cache the hash value, see
   https://bugs.python.org/issue9685 */
static Py_hash_t
tuplehash(PyTupleObject *v)
{
    Py_ssize_t i, len = Py_SIZE(v);
    PyObject **item = v->ob_item;

    /* xxHash-style accumulation over the item hashes; see the comment
       block above for the algorithm and the _PyHASH_XX* constants. */
    Py_uhash_t acc = _PyHASH_XXPRIME_5;
    for (i = 0; i < len; i++) {
        Py_uhash_t lane = PyObject_Hash(item[i]);
        if (lane == (Py_uhash_t)-1) {
            /* An item's hash computation failed (e.g. unhashable). */
            return -1;
        }
        acc += lane * _PyHASH_XXPRIME_2;
        acc = _PyHASH_XXROTATE(acc);
        acc *= _PyHASH_XXPRIME_1;
    }

    /* Add input length, mangled to keep the historical value of hash(()). */
    acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL);

    /* -1 is reserved as the error indicator; remap it. */
    if (acc == (Py_uhash_t)-1) {
        return 1546275796;
    }
    return acc;
}
345 
/* sq_length slot: number of items in the tuple. */
static Py_ssize_t
tuplelength(PyTupleObject *a)
{
    return Py_SIZE(a);
}
351 
352 static int
tuplecontains(PyTupleObject * a,PyObject * el)353 tuplecontains(PyTupleObject *a, PyObject *el)
354 {
355     Py_ssize_t i;
356     int cmp;
357 
358     for (i = 0, cmp = 0 ; cmp == 0 && i < Py_SIZE(a); ++i)
359         cmp = PyObject_RichCompareBool(PyTuple_GET_ITEM(a, i), el, Py_EQ);
360     return cmp;
361 }
362 
363 static PyObject *
tupleitem(PyTupleObject * a,Py_ssize_t i)364 tupleitem(PyTupleObject *a, Py_ssize_t i)
365 {
366     if (i < 0 || i >= Py_SIZE(a)) {
367         PyErr_SetString(PyExc_IndexError, "tuple index out of range");
368         return NULL;
369     }
370     return Py_NewRef(a->ob_item[i]);
371 }
372 
373 PyObject *
_PyTuple_FromArray(PyObject * const * src,Py_ssize_t n)374 _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n)
375 {
376     if (n == 0) {
377         return tuple_get_empty();
378     }
379 
380     PyTupleObject *tuple = tuple_alloc(n);
381     if (tuple == NULL) {
382         return NULL;
383     }
384     PyObject **dst = tuple->ob_item;
385     for (Py_ssize_t i = 0; i < n; i++) {
386         PyObject *item = src[i];
387         dst[i] = Py_NewRef(item);
388     }
389     _PyObject_GC_TRACK(tuple);
390     return (PyObject *)tuple;
391 }
392 
/* Like _PyTuple_FromArray(), but *steals* the references in src: on
   success they move into the tuple; on allocation failure every
   reference is released here so the caller owns nothing either way. */
PyObject *
_PyTuple_FromArraySteal(PyObject *const *src, Py_ssize_t n)
{
    if (n == 0) {
        return tuple_get_empty();
    }
    PyTupleObject *tuple = tuple_alloc(n);
    if (tuple == NULL) {
        for (Py_ssize_t i = 0; i < n; i++) {
            Py_DECREF(src[i]);
        }
        return NULL;
    }
    PyObject **dst = tuple->ob_item;
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *item = src[i];
        dst[i] = item;  /* reference ownership moves into the tuple */
    }
    _PyObject_GC_TRACK(tuple);
    return (PyObject *)tuple;
}
414 
415 static PyObject *
tupleslice(PyTupleObject * a,Py_ssize_t ilow,Py_ssize_t ihigh)416 tupleslice(PyTupleObject *a, Py_ssize_t ilow,
417            Py_ssize_t ihigh)
418 {
419     if (ilow < 0)
420         ilow = 0;
421     if (ihigh > Py_SIZE(a))
422         ihigh = Py_SIZE(a);
423     if (ihigh < ilow)
424         ihigh = ilow;
425     if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
426         return Py_NewRef(a);
427     }
428     return _PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow);
429 }
430 
431 PyObject *
PyTuple_GetSlice(PyObject * op,Py_ssize_t i,Py_ssize_t j)432 PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
433 {
434     if (op == NULL || !PyTuple_Check(op)) {
435         PyErr_BadInternalCall();
436         return NULL;
437     }
438     return tupleslice((PyTupleObject *)op, i, j);
439 }
440 
/* sq_concat slot: a + bb.  When one operand is an empty exact tuple,
   the other exact tuple is returned unchanged (tuples are immutable). */
static PyObject *
tupleconcat(PyTupleObject *a, PyObject *bb)
{
    Py_ssize_t size;
    Py_ssize_t i;
    PyObject **src, **dest;
    PyTupleObject *np;
    if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
        return Py_NewRef(bb);
    }
    if (!PyTuple_Check(bb)) {
        PyErr_Format(PyExc_TypeError,
             "can only concatenate tuple (not \"%.200s\") to tuple",
                 Py_TYPE(bb)->tp_name);
        return NULL;
    }
    PyTupleObject *b = (PyTupleObject *)bb;

    if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
        return Py_NewRef(a);
    }
    /* Both sizes are existing object sizes, so the sum cannot overflow. */
    assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX);
    size = Py_SIZE(a) + Py_SIZE(b);
    if (size == 0) {
        return tuple_get_empty();
    }

    np = tuple_alloc(size);
    if (np == NULL) {
        return NULL;
    }
    /* Copy a's items, then b's items, taking a new reference to each. */
    src = a->ob_item;
    dest = np->ob_item;
    for (i = 0; i < Py_SIZE(a); i++) {
        PyObject *v = src[i];
        dest[i] = Py_NewRef(v);
    }
    src = b->ob_item;
    dest = np->ob_item + Py_SIZE(a);
    for (i = 0; i < Py_SIZE(b); i++) {
        PyObject *v = src[i];
        dest[i] = Py_NewRef(v);
    }
    _PyObject_GC_TRACK(np);
    return (PyObject *)np;
}
487 
/* sq_repeat slot: a * n.  Exact tuples repeated once (or empty) are
   returned unchanged; n <= 0 yields the empty tuple. */
static PyObject *
tuplerepeat(PyTupleObject *a, Py_ssize_t n)
{
    const Py_ssize_t input_size = Py_SIZE(a);
    if (input_size == 0 || n == 1) {
        if (PyTuple_CheckExact(a)) {
            /* Since tuples are immutable, we can return a shared
               copy in this case */
            return Py_NewRef(a);
        }
    }
    if (input_size == 0 || n <= 0) {
        return tuple_get_empty();
    }
    assert(n>0);

    /* Guard the multiplication below against overflow. */
    if (input_size > PY_SSIZE_T_MAX / n)
        return PyErr_NoMemory();
    Py_ssize_t output_size = input_size * n;

    PyTupleObject *np = tuple_alloc(output_size);
    if (np == NULL)
        return NULL;

    PyObject **dest = np->ob_item;
    if (input_size == 1) {
        /* Single element: bump its refcount by n once, then fill. */
        PyObject *elem = a->ob_item[0];
        _Py_RefcntAdd(elem, n);
        PyObject **dest_end = dest + output_size;
        while (dest < dest_end) {
            *dest++ = elem;
        }
    }
    else {
        /* Copy the source items once (adding n references each), then
           replicate that first chunk across the remaining slots. */
        PyObject **src = a->ob_item;
        PyObject **src_end = src + input_size;
        while (src < src_end) {
            _Py_RefcntAdd(*src, n);
            *dest++ = *src++;
        }

        _Py_memory_repeat((char *)np->ob_item, sizeof(PyObject *)*output_size,
                          sizeof(PyObject *)*input_size);
    }
    _PyObject_GC_TRACK(np);
    return (PyObject *) np;
}
535 
536 /*[clinic input]
537 tuple.index
538 
539     value: object
540     start: slice_index(accept={int}) = 0
541     stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
542     /
543 
544 Return first index of value.
545 
546 Raises ValueError if the value is not present.
547 [clinic start generated code]*/
548 
/* tuple.index implementation: first index of 'value' in [start, stop),
   with Python-style negative index normalization.  Raises ValueError
   when the value is absent. */
static PyObject *
tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
                 Py_ssize_t stop)
/*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
{
    Py_ssize_t i;

    /* Negative indices count from the end; clamp start at 0. */
    if (start < 0) {
        start += Py_SIZE(self);
        if (start < 0)
            start = 0;
    }
    if (stop < 0) {
        stop += Py_SIZE(self);
    }
    else if (stop > Py_SIZE(self)) {
        stop = Py_SIZE(self);
    }
    for (i = start; i < stop; i++) {
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
        if (cmp > 0)
            return PyLong_FromSsize_t(i);
        else if (cmp < 0)
            return NULL;  /* comparison raised */
    }
    PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
    return NULL;
}
577 
578 /*[clinic input]
579 tuple.count
580 
581      value: object
582      /
583 
584 Return number of occurrences of value.
585 [clinic start generated code]*/
586 
587 static PyObject *
tuple_count(PyTupleObject * self,PyObject * value)588 tuple_count(PyTupleObject *self, PyObject *value)
589 /*[clinic end generated code: output=aa927affc5a97605 input=531721aff65bd772]*/
590 {
591     Py_ssize_t count = 0;
592     Py_ssize_t i;
593 
594     for (i = 0; i < Py_SIZE(self); i++) {
595         int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
596         if (cmp > 0)
597             count++;
598         else if (cmp < 0)
599             return NULL;
600     }
601     return PyLong_FromSsize_t(count);
602 }
603 
604 static int
tupletraverse(PyTupleObject * o,visitproc visit,void * arg)605 tupletraverse(PyTupleObject *o, visitproc visit, void *arg)
606 {
607     Py_ssize_t i;
608 
609     for (i = Py_SIZE(o); --i >= 0; )
610         Py_VISIT(o->ob_item[i]);
611     return 0;
612 }
613 
/* tp_richcompare slot: lexicographic comparison of two tuples. */
static PyObject *
tuplerichcompare(PyObject *v, PyObject *w, int op)
{
    PyTupleObject *vt, *wt;
    Py_ssize_t i;
    Py_ssize_t vlen, wlen;

    if (!PyTuple_Check(v) || !PyTuple_Check(w))
        Py_RETURN_NOTIMPLEMENTED;

    vt = (PyTupleObject *)v;
    wt = (PyTupleObject *)w;

    vlen = Py_SIZE(vt);
    wlen = Py_SIZE(wt);

    /* Note:  the corresponding code for lists has an "early out" test
     * here when op is EQ or NE and the lengths differ.  That pays there,
     * but Tim was unable to find any real code where EQ/NE tuple
     * compares don't have the same length, so testing for it here would
     * have cost without benefit.
     */

    /* Search for the first index where items are different.
     * Note that because tuples are immutable, it's safe to reuse
     * vlen and wlen across the comparison calls.
     */
    for (i = 0; i < vlen && i < wlen; i++) {
        int k = PyObject_RichCompareBool(vt->ob_item[i],
                                         wt->ob_item[i], Py_EQ);
        if (k < 0)
            return NULL;
        if (!k)
            break;
    }

    if (i >= vlen || i >= wlen) {
        /* No more items to compare -- compare sizes */
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
    }

    /* We have an item that differs -- shortcuts for EQ/NE */
    if (op == Py_EQ) {
        Py_RETURN_FALSE;
    }
    if (op == Py_NE) {
        Py_RETURN_TRUE;
    }

    /* Compare the final item again using the proper operator */
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
}
666 
667 static PyObject *
668 tuple_subtype_new(PyTypeObject *type, PyObject *iterable);
669 
670 /*[clinic input]
671 @classmethod
672 tuple.__new__ as tuple_new
673     iterable: object(c_default="NULL") = ()
674     /
675 
676 Built-in immutable sequence.
677 
678 If no argument is given, the constructor returns an empty tuple.
679 If iterable is specified the tuple is initialized from iterable's items.
680 
681 If the argument is a tuple, the return value is the same object.
682 [clinic start generated code]*/
683 
684 static PyObject *
tuple_new_impl(PyTypeObject * type,PyObject * iterable)685 tuple_new_impl(PyTypeObject *type, PyObject *iterable)
686 /*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
687 {
688     if (type != &PyTuple_Type)
689         return tuple_subtype_new(type, iterable);
690 
691     if (iterable == NULL) {
692         return tuple_get_empty();
693     }
694     else {
695         return PySequence_Tuple(iterable);
696     }
697 }
698 
/* tp_vectorcall for the tuple type: fast-path constructor accepting
   zero or one positional argument and no keyword arguments. */
static PyObject *
tuple_vectorcall(PyObject *type, PyObject * const*args,
                 size_t nargsf, PyObject *kwnames)
{
    if (!_PyArg_NoKwnames("tuple", kwnames)) {
        return NULL;
    }

    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
    if (!_PyArg_CheckPositional("tuple", nargs, 0, 1)) {
        return NULL;
    }

    if (nargs) {
        return tuple_new_impl(_PyType_CAST(type), args[0]);
    }
    else {
        return tuple_get_empty();
    }
}
719 
/* Construct an instance of a tuple subclass: first build a plain tuple
   from 'iterable', then copy its items into a freshly allocated
   subclass instance. */
static PyObject *
tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
{
    PyObject *tmp, *newobj, *item;
    Py_ssize_t i, n;

    assert(PyType_IsSubtype(type, &PyTuple_Type));
    // tuple subclasses must implement the GC protocol
    assert(_PyType_IS_GC(type));

    tmp = tuple_new_impl(&PyTuple_Type, iterable);
    if (tmp == NULL)
        return NULL;
    assert(PyTuple_Check(tmp));
    /* This may allocate an empty tuple that is not the global one. */
    newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
    if (newobj == NULL) {
        Py_DECREF(tmp);
        return NULL;
    }
    for (i = 0; i < n; i++) {
        item = PyTuple_GET_ITEM(tmp, i);
        PyTuple_SET_ITEM(newobj, i, Py_NewRef(item));
    }
    Py_DECREF(tmp);

    // Don't track if a subclass tp_alloc is PyType_GenericAlloc()
    if (!_PyObject_GC_IS_TRACKED(newobj)) {
        _PyObject_GC_TRACK(newobj);
    }
    return newobj;
}
752 
/* Sequence protocol table for the tuple type. */
static PySequenceMethods tuple_as_sequence = {
    (lenfunc)tuplelength,                       /* sq_length */
    (binaryfunc)tupleconcat,                    /* sq_concat */
    (ssizeargfunc)tuplerepeat,                  /* sq_repeat */
    (ssizeargfunc)tupleitem,                    /* sq_item */
    0,                                          /* sq_slice */
    0,                                          /* sq_ass_item */
    0,                                          /* sq_ass_slice */
    (objobjproc)tuplecontains,                  /* sq_contains */
};
763 
/* mp_subscript slot: t[index] or t[slice].  Integer indices delegate
   to tupleitem() (which handles bounds); slices build a new tuple,
   except that a full identity slice of an exact tuple is shared. */
static PyObject*
tuplesubscript(PyTupleObject* self, PyObject* item)
{
    if (_PyIndex_Check(item)) {
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
        if (i == -1 && PyErr_Occurred())
            return NULL;
        /* Negative indices count from the end. */
        if (i < 0)
            i += PyTuple_GET_SIZE(self);
        return tupleitem(self, i);
    }
    else if (PySlice_Check(item)) {
        Py_ssize_t start, stop, step, slicelength, i;
        size_t cur;
        PyObject* it;
        PyObject **src, **dest;

        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
            return NULL;
        }
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
                                            &stop, step);

        if (slicelength <= 0) {
            return tuple_get_empty();
        }
        else if (start == 0 && step == 1 &&
                 slicelength == PyTuple_GET_SIZE(self) &&
                 PyTuple_CheckExact(self)) {
            /* t[:] of an exact tuple: share the immutable object. */
            return Py_NewRef(self);
        }
        else {
            PyTupleObject* result = tuple_alloc(slicelength);
            if (!result) return NULL;

            /* Copy every step-th item, taking a new reference each. */
            src = self->ob_item;
            dest = result->ob_item;
            for (cur = start, i = 0; i < slicelength;
                 cur += step, i++) {
                it = Py_NewRef(src[cur]);
                dest[i] = it;
            }

            _PyObject_GC_TRACK(result);
            return (PyObject *)result;
        }
    }
    else {
        PyErr_Format(PyExc_TypeError,
                     "tuple indices must be integers or slices, not %.200s",
                     Py_TYPE(item)->tp_name);
        return NULL;
    }
}
818 
819 /*[clinic input]
820 tuple.__getnewargs__
821 [clinic start generated code]*/
822 
/* tuple.__getnewargs__ implementation (pickle/copy support): return a
   1-tuple wrapping a full copy of self.  The "N" format code consumes
   the reference produced by tupleslice(). */
static PyObject *
tuple___getnewargs___impl(PyTupleObject *self)
/*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
{
    return Py_BuildValue("(N)", tupleslice(self, 0, Py_SIZE(self)));
}
829 
/* Method table for the tuple type (clinic-generated METHODDEF macros). */
static PyMethodDef tuple_methods[] = {
    TUPLE___GETNEWARGS___METHODDEF
    TUPLE_INDEX_METHODDEF
    TUPLE_COUNT_METHODDEF
    {"__class_getitem__", Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
    {NULL,              NULL}           /* sentinel */
};
837 
/* Mapping protocol table: only length and subscript (no assignment —
   tuples are immutable). */
static PyMappingMethods tuple_as_mapping = {
    (lenfunc)tuplelength,                       /* mp_length */
    (binaryfunc)tuplesubscript,                 /* mp_subscript */
    0                                           /* mp_ass_subscript */
};
843 
844 static PyObject *tuple_iter(PyObject *seq);
845 
/* Type object for the built-in "tuple" type. */
PyTypeObject PyTuple_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple",
    sizeof(PyTupleObject) - sizeof(PyObject *),  /* tp_basicsize (one item slot is in ob_item) */
    sizeof(PyObject *),                          /* tp_itemsize */
    (destructor)tupledealloc,                   /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    (reprfunc)tuplerepr,                        /* tp_repr */
    0,                                          /* tp_as_number */
    &tuple_as_sequence,                         /* tp_as_sequence */
    &tuple_as_mapping,                          /* tp_as_mapping */
    (hashfunc)tuplehash,                        /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS |
        _Py_TPFLAGS_MATCH_SELF | Py_TPFLAGS_SEQUENCE,  /* tp_flags */
    tuple_new__doc__,                           /* tp_doc */
    (traverseproc)tupletraverse,                /* tp_traverse */
    0,                                          /* tp_clear */
    tuplerichcompare,                           /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    tuple_iter,                                 /* tp_iter */
    0,                                          /* tp_iternext */
    tuple_methods,                              /* tp_methods */
    0,                                          /* tp_members */
    0,                                          /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    0,                                          /* tp_init */
    0,                                          /* tp_alloc */
    tuple_new,                                  /* tp_new */
    PyObject_GC_Del,                            /* tp_free */
    .tp_vectorcall = tuple_vectorcall,
};
890 
/* The following function breaks the notion that tuples are immutable:
   it changes the size of a tuple.  We get away with this only if there
   is only one module referencing the object.  You can also think of it
   as creating a new tuple object and destroying the old one, only more
   efficiently.  In any case, don't use this if the tuple may already be
   known to some other part of the code. */

int
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
{
    PyTupleObject *v;
    PyTupleObject *sv;
    Py_ssize_t i;
    Py_ssize_t oldsize;

    /* Only a genuine, unshared tuple may be resized (the empty
       singleton is exempt from the refcount check and handled below). */
    v = (PyTupleObject *) *pv;
    if (v == NULL || !Py_IS_TYPE(v, &PyTuple_Type) ||
        (Py_SIZE(v) != 0 && Py_REFCNT(v) != 1)) {
        *pv = 0;
        Py_XDECREF(v);
        PyErr_BadInternalCall();
        return -1;
    }

    oldsize = Py_SIZE(v);
    if (oldsize == newsize) {
        return 0;
    }
    if (newsize == 0) {
        Py_DECREF(v);
        *pv = tuple_get_empty();
        return 0;
    }
    if (oldsize == 0) {
#ifdef Py_DEBUG
        assert(v == &_Py_SINGLETON(tuple_empty));
#endif
        /* The empty tuple is statically allocated so we never
           resize it in-place. */
        Py_DECREF(v);
        *pv = PyTuple_New(newsize);
        return *pv == NULL ? -1 : 0;
    }

    /* Untrack during the realloc; the object is re-tracked on success. */
    if (_PyObject_GC_IS_TRACKED(v)) {
        _PyObject_GC_UNTRACK(v);
    }
#ifdef Py_TRACE_REFS
    _Py_ForgetReference((PyObject *) v);
#endif
    /* DECREF items deleted by shrinkage */
    for (i = newsize; i < oldsize; i++) {
        Py_CLEAR(v->ob_item[i]);
    }
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
    if (sv == NULL) {
        *pv = NULL;
#ifdef Py_REF_DEBUG
        _Py_DecRefTotal(_PyThreadState_GET());
#endif
        PyObject_GC_Del(v);
        return -1;
    }
    _Py_NewReferenceNoTotal((PyObject *) sv);
    /* Zero out items added by growing */
    if (newsize > oldsize)
        memset(&sv->ob_item[oldsize], 0,
               sizeof(*sv->ob_item) * (newsize - oldsize));
    *pv = (PyObject *) sv;
    _PyObject_GC_TRACK(sv);
    return 0;
}
963 
964 
965 static void maybe_freelist_clear(struct _Py_object_freelists *, int);
966 
967 
void
_PyTuple_ClearFreeList(struct _Py_object_freelists *freelists, int is_finalization)
{
    /* Public entry point: delegate to the file-local helper.  When
       is_finalization is true the freelists are also marked dead
       (numfree set to -1) so nothing is pushed onto them afterwards. */
    maybe_freelist_clear(freelists, is_finalization);
}
973 
974 /*********************** Tuple Iterator **************************/
975 
976 
/* Destroy a tuple_iterator: untrack, drop the tuple reference, free. */
static void
tupleiter_dealloc(_PyTupleIterObject *it)
{
    /* Untrack before teardown so the GC never sees a half-freed object. */
    _PyObject_GC_UNTRACK(it);
    Py_XDECREF(it->it_seq);   /* it_seq is NULL once the iterator is exhausted */
    PyObject_GC_Del(it);
}
984 
/* GC traversal: the iterator's only owned reference is the tuple it walks. */
static int
tupleiter_traverse(_PyTupleIterObject *it, visitproc visit, void *arg)
{
    Py_VISIT(it->it_seq);
    return 0;
}
991 
992 static PyObject *
tupleiter_next(_PyTupleIterObject * it)993 tupleiter_next(_PyTupleIterObject *it)
994 {
995     PyTupleObject *seq;
996     PyObject *item;
997 
998     assert(it != NULL);
999     seq = it->it_seq;
1000     if (seq == NULL)
1001         return NULL;
1002     assert(PyTuple_Check(seq));
1003 
1004     if (it->it_index < PyTuple_GET_SIZE(seq)) {
1005         item = PyTuple_GET_ITEM(seq, it->it_index);
1006         ++it->it_index;
1007         return Py_NewRef(item);
1008     }
1009 
1010     it->it_seq = NULL;
1011     Py_DECREF(seq);
1012     return NULL;
1013 }
1014 
1015 static PyObject *
tupleiter_len(_PyTupleIterObject * it,PyObject * Py_UNUSED (ignored))1016 tupleiter_len(_PyTupleIterObject *it, PyObject *Py_UNUSED(ignored))
1017 {
1018     Py_ssize_t len = 0;
1019     if (it->it_seq)
1020         len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
1021     return PyLong_FromSsize_t(len);
1022 }
1023 
/* Docstring for tuple_iterator.__length_hint__ (see tupleiter_methods). */
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
1025 
1026 static PyObject *
tupleiter_reduce(_PyTupleIterObject * it,PyObject * Py_UNUSED (ignored))1027 tupleiter_reduce(_PyTupleIterObject *it, PyObject *Py_UNUSED(ignored))
1028 {
1029     PyObject *iter = _PyEval_GetBuiltin(&_Py_ID(iter));
1030 
1031     /* _PyEval_GetBuiltin can invoke arbitrary code,
1032      * call must be before access of iterator pointers.
1033      * see issue #101765 */
1034 
1035     if (it->it_seq)
1036         return Py_BuildValue("N(O)n", iter, it->it_seq, it->it_index);
1037     else
1038         return Py_BuildValue("N(())", iter);
1039 }
1040 
1041 static PyObject *
tupleiter_setstate(_PyTupleIterObject * it,PyObject * state)1042 tupleiter_setstate(_PyTupleIterObject *it, PyObject *state)
1043 {
1044     Py_ssize_t index = PyLong_AsSsize_t(state);
1045     if (index == -1 && PyErr_Occurred())
1046         return NULL;
1047     if (it->it_seq != NULL) {
1048         if (index < 0)
1049             index = 0;
1050         else if (index > PyTuple_GET_SIZE(it->it_seq))
1051             index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
1052         it->it_index = index;
1053     }
1054     Py_RETURN_NONE;
1055 }
1056 
/* Docstrings for tuple_iterator.__reduce__ / __setstate__ (pickle support). */
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
1059 
/* Method table for tuple_iterator instances. */
static PyMethodDef tupleiter_methods[] = {
    {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
    {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
    {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
    {NULL,              NULL}           /* sentinel */
};
1066 
1067 PyTypeObject PyTupleIter_Type = {
1068     PyVarObject_HEAD_INIT(&PyType_Type, 0)
1069     "tuple_iterator",                           /* tp_name */
1070     sizeof(_PyTupleIterObject),                    /* tp_basicsize */
1071     0,                                          /* tp_itemsize */
1072     /* methods */
1073     (destructor)tupleiter_dealloc,              /* tp_dealloc */
1074     0,                                          /* tp_vectorcall_offset */
1075     0,                                          /* tp_getattr */
1076     0,                                          /* tp_setattr */
1077     0,                                          /* tp_as_async */
1078     0,                                          /* tp_repr */
1079     0,                                          /* tp_as_number */
1080     0,                                          /* tp_as_sequence */
1081     0,                                          /* tp_as_mapping */
1082     0,                                          /* tp_hash */
1083     0,                                          /* tp_call */
1084     0,                                          /* tp_str */
1085     PyObject_GenericGetAttr,                    /* tp_getattro */
1086     0,                                          /* tp_setattro */
1087     0,                                          /* tp_as_buffer */
1088     Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
1089     0,                                          /* tp_doc */
1090     (traverseproc)tupleiter_traverse,           /* tp_traverse */
1091     0,                                          /* tp_clear */
1092     0,                                          /* tp_richcompare */
1093     0,                                          /* tp_weaklistoffset */
1094     PyObject_SelfIter,                          /* tp_iter */
1095     (iternextfunc)tupleiter_next,               /* tp_iternext */
1096     tupleiter_methods,                          /* tp_methods */
1097     0,
1098 };
1099 
1100 static PyObject *
tuple_iter(PyObject * seq)1101 tuple_iter(PyObject *seq)
1102 {
1103     _PyTupleIterObject *it;
1104 
1105     if (!PyTuple_Check(seq)) {
1106         PyErr_BadInternalCall();
1107         return NULL;
1108     }
1109     it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type);
1110     if (it == NULL)
1111         return NULL;
1112     it->it_index = 0;
1113     it->it_seq = (PyTupleObject *)Py_NewRef(seq);
1114     _PyObject_GC_TRACK(it);
1115     return (PyObject *)it;
1116 }
1117 
1118 
1119 /*************
1120  * freelists *
1121  *************/
1122 
/* Shorthand for the tuple freelists; each helper below declares a local
   `freelists` pointer that this macro expands against.  Lists are indexed
   by (tuple size - 1). */
#define TUPLE_FREELIST (freelists->tuples)
/* numfree[0] < 0 marks the freelists as finalized (see maybe_freelist_clear). */
#define FREELIST_FINALIZED (TUPLE_FREELIST.numfree[0] < 0)
1125 
/* Pop a cached tuple of exactly `size` items off the freelist and
   re-initialize it as a fresh object; return NULL if none is cached
   (or freelists are compiled out), in which case the caller allocates. */
static inline PyTupleObject *
maybe_freelist_pop(Py_ssize_t size)
{
#ifdef WITH_FREELISTS
    struct _Py_object_freelists *freelists = _Py_object_freelists_GET();
    if (size == 0) {
        return NULL;
    }
    assert(size > 0);
    if (size <= PyTuple_MAXSAVESIZE) {
        Py_ssize_t index = size - 1;
        PyTupleObject *op = TUPLE_FREELIST.items[index];
        if (op != NULL) {
            /* op is the head of a linked list, with the first item
               pointing to the next node.  Here we pop off the old head. */
            TUPLE_FREELIST.items[index] = (PyTupleObject *) op->ob_item[0];
            TUPLE_FREELIST.numfree[index]--;
            /* Inlined _PyObject_InitVar() without _PyType_HasFeature() test */
#ifdef Py_TRACE_REFS
            /* maybe_freelist_push() ensures these were already set. */
            // XXX Can we drop these?  See commit 68055ce6fe01 (GvR, Dec 1998).
            Py_SET_SIZE(op, size);
            Py_SET_TYPE(op, &PyTuple_Type);
#endif
            _Py_NewReference((PyObject *)op);
            /* END inlined _PyObject_InitVar() */
            OBJECT_STAT_INC(from_freelist);
            return op;
        }
    }
#endif
    return NULL;
}
1159 
1160 static inline int
maybe_freelist_push(PyTupleObject * op)1161 maybe_freelist_push(PyTupleObject *op)
1162 {
1163 #ifdef WITH_FREELISTS
1164     struct _Py_object_freelists *freelists = _Py_object_freelists_GET();
1165     if (Py_SIZE(op) == 0) {
1166         return 0;
1167     }
1168     Py_ssize_t index = Py_SIZE(op) - 1;
1169     if (index < PyTuple_NFREELISTS
1170         && TUPLE_FREELIST.numfree[index] < PyTuple_MAXFREELIST
1171         && TUPLE_FREELIST.numfree[index] >= 0
1172         && Py_IS_TYPE(op, &PyTuple_Type))
1173     {
1174         /* op is the head of a linked list, with the first item
1175            pointing to the next node.  Here we set op as the new head. */
1176         op->ob_item[0] = (PyObject *) TUPLE_FREELIST.items[index];
1177         TUPLE_FREELIST.items[index] = op;
1178         TUPLE_FREELIST.numfree[index]++;
1179         OBJECT_STAT_INC(to_freelist);
1180         return 1;
1181     }
1182 #endif
1183     return 0;
1184 }
1185 
/* Free every cached tuple.  With fini true, also mark each list as
   finalized (numfree = -1) so no further pushes are accepted. */
static void
maybe_freelist_clear(struct _Py_object_freelists *freelists, int fini)
{
#ifdef WITH_FREELISTS
    for (Py_ssize_t i = 0; i < PyTuple_NFREELISTS; i++) {
        /* Detach the whole chain first, then walk and free each node. */
        PyTupleObject *node = TUPLE_FREELIST.items[i];
        TUPLE_FREELIST.items[i] = NULL;
        TUPLE_FREELIST.numfree[i] = fini ? -1 : 0;
        while (node != NULL) {
            PyTupleObject *next = (PyTupleObject *)(node->ob_item[0]);
            PyObject_GC_Del(node);
            node = next;
        }
    }
#endif
}
1202 
1203 /* Print summary info about the state of the optimized allocator */
1204 void
_PyTuple_DebugMallocStats(FILE * out)1205 _PyTuple_DebugMallocStats(FILE *out)
1206 {
1207 #ifdef WITH_FREELISTS
1208     struct _Py_object_freelists *freelists = _Py_object_freelists_GET();
1209     for (int i = 0; i < PyTuple_NFREELISTS; i++) {
1210         int len = i + 1;
1211         char buf[128];
1212         PyOS_snprintf(buf, sizeof(buf),
1213                       "free %d-sized PyTupleObject", len);
1214         _PyDebugAllocatorStats(out, buf, TUPLE_FREELIST.numfree[i],
1215                                _PyObject_VAR_SIZE(&PyTuple_Type, len));
1216     }
1217 #endif
1218 }
1219 
/* Keep the freelist helper macros scoped to this file.  TUPLE_FREELIST
   was previously left defined past end-of-file (only FREELIST_FINALIZED
   was undefined); undef it here as well. */
#undef STATE
#undef TUPLE_FREELIST
#undef FREELIST_FINALIZED
1222