1
2 /* Tuple object implementation */
3
4 #include "Python.h"
5 #include "pycore_object.h"
6 #include "pycore_pystate.h"
7 #include "pycore_accu.h"
8
9 /*[clinic input]
10 class tuple "PyTupleObject *" "&PyTuple_Type"
11 [clinic start generated code]*/
12 /*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/
13
14 #include "clinic/tupleobject.c.h"
15
16 /* Speed optimization to avoid frequent malloc/free of small tuples */
17 #ifndef PyTuple_MAXSAVESIZE
18 #define PyTuple_MAXSAVESIZE 20 /* Largest tuple to save on free list */
19 #endif
20 #ifndef PyTuple_MAXFREELIST
21 #define PyTuple_MAXFREELIST 2000 /* Maximum number of tuples of each size to save */
22 #endif
23
24 #if PyTuple_MAXSAVESIZE > 0
25 /* Entries 1 up to PyTuple_MAXSAVESIZE are free lists, entry 0 is the empty
26 tuple () of which at most one instance will be allocated.
27 */
28 static PyTupleObject *free_list[PyTuple_MAXSAVESIZE];
29 static int numfree[PyTuple_MAXSAVESIZE];
30 #endif
31 #ifdef COUNT_ALLOCS
32 Py_ssize_t _Py_fast_tuple_allocs;
33 Py_ssize_t _Py_tuple_zero_allocs;
34 #endif
35
36 /* Debug statistic to count GC tracking of tuples.
37 Please note that tuples are only untracked when considered by the GC, and
38 many of them will be dead before. Therefore, a tracking rate close to 100%
39 does not necessarily prove that the heuristic is inefficient.
40 */
41 #ifdef SHOW_TRACK_COUNT
42 static Py_ssize_t count_untracked = 0;
43 static Py_ssize_t count_tracked = 0;
44
45 static void
show_track(void)46 show_track(void)
47 {
48 PyInterpreterState *interp = _PyInterpreterState_Get();
49 if (!interp->config.show_alloc_count) {
50 return;
51 }
52
53 fprintf(stderr, "Tuples created: %" PY_FORMAT_SIZE_T "d\n",
54 count_tracked + count_untracked);
55 fprintf(stderr, "Tuples tracked by the GC: %" PY_FORMAT_SIZE_T
56 "d\n", count_tracked);
57 fprintf(stderr, "%.2f%% tuple tracking rate\n\n",
58 (100.0*count_tracked/(count_untracked+count_tracked)));
59 }
60 #endif
61
62 /* Print summary info about the state of the optimized allocator */
void
_PyTuple_DebugMallocStats(FILE *out)
{
    /* Report, for each small-tuple free list, how many cached tuples it
       holds and the allocation size of each one. */
#if PyTuple_MAXSAVESIZE > 0
    for (int len = 1; len < PyTuple_MAXSAVESIZE; len++) {
        char label[128];
        PyOS_snprintf(label, sizeof(label),
                      "free %d-sized PyTupleObject", len);
        _PyDebugAllocatorStats(out, label, numfree[len],
                               _PyObject_VAR_SIZE(&PyTuple_Type, len));
    }
#endif
}
78
/* Allocate a tuple of the given size with every item slot set to NULL.
   Small tuples are served from the per-size free lists, and the empty
   tuple is a cached singleton.  Returns NULL with an exception set on
   negative size or allocation failure. */
PyObject *
PyTuple_New(Py_ssize_t size)
{
    PyTupleObject *op;
    Py_ssize_t i;
    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }
#if PyTuple_MAXSAVESIZE > 0
    /* Empty tuple: hand out a new reference to the shared singleton. */
    if (size == 0 && free_list[0]) {
        op = free_list[0];
        Py_INCREF(op);
#ifdef COUNT_ALLOCS
        _Py_tuple_zero_allocs++;
#endif
        return (PyObject *) op;
    }
    /* Small tuple: pop one off the free list for this size.  Free-listed
       tuples are chained through ob_item[0]. */
    if (size < PyTuple_MAXSAVESIZE && (op = free_list[size]) != NULL) {
        free_list[size] = (PyTupleObject *) op->ob_item[0];
        numfree[size]--;
#ifdef COUNT_ALLOCS
        _Py_fast_tuple_allocs++;
#endif
        /* Inline PyObject_InitVar */
#ifdef Py_TRACE_REFS
        Py_SIZE(op) = size;
        Py_TYPE(op) = &PyTuple_Type;
#endif
        _Py_NewReference((PyObject *)op);
    }
    else
#endif
    {
        /* Check for overflow */
        if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - sizeof(PyTupleObject) -
                    sizeof(PyObject *)) / sizeof(PyObject *)) {
            return PyErr_NoMemory();
        }
        op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
        if (op == NULL)
            return NULL;
    }
    for (i=0; i < size; i++)
        op->ob_item[i] = NULL;
#if PyTuple_MAXSAVESIZE > 0
    if (size == 0) {
        /* Cache the empty-tuple singleton for all future callers. */
        free_list[0] = op;
        ++numfree[0];
        Py_INCREF(op); /* extra INCREF so that this is never freed */
    }
#endif
#ifdef SHOW_TRACK_COUNT
    count_tracked++;
#endif
    _PyObject_GC_TRACK(op);
    return (PyObject *) op;
}
137
138 Py_ssize_t
PyTuple_Size(PyObject * op)139 PyTuple_Size(PyObject *op)
140 {
141 if (!PyTuple_Check(op)) {
142 PyErr_BadInternalCall();
143 return -1;
144 }
145 else
146 return Py_SIZE(op);
147 }
148
149 PyObject *
PyTuple_GetItem(PyObject * op,Py_ssize_t i)150 PyTuple_GetItem(PyObject *op, Py_ssize_t i)
151 {
152 if (!PyTuple_Check(op)) {
153 PyErr_BadInternalCall();
154 return NULL;
155 }
156 if (i < 0 || i >= Py_SIZE(op)) {
157 PyErr_SetString(PyExc_IndexError, "tuple index out of range");
158 return NULL;
159 }
160 return ((PyTupleObject *)op) -> ob_item[i];
161 }
162
163 int
PyTuple_SetItem(PyObject * op,Py_ssize_t i,PyObject * newitem)164 PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
165 {
166 PyObject **p;
167 if (!PyTuple_Check(op) || op->ob_refcnt != 1) {
168 Py_XDECREF(newitem);
169 PyErr_BadInternalCall();
170 return -1;
171 }
172 if (i < 0 || i >= Py_SIZE(op)) {
173 Py_XDECREF(newitem);
174 PyErr_SetString(PyExc_IndexError,
175 "tuple assignment index out of range");
176 return -1;
177 }
178 p = ((PyTupleObject *)op) -> ob_item + i;
179 Py_XSETREF(*p, newitem);
180 return 0;
181 }
182
183 void
_PyTuple_MaybeUntrack(PyObject * op)184 _PyTuple_MaybeUntrack(PyObject *op)
185 {
186 PyTupleObject *t;
187 Py_ssize_t i, n;
188
189 if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
190 return;
191 t = (PyTupleObject *) op;
192 n = Py_SIZE(t);
193 for (i = 0; i < n; i++) {
194 PyObject *elt = PyTuple_GET_ITEM(t, i);
195 /* Tuple with NULL elements aren't
196 fully constructed, don't untrack
197 them yet. */
198 if (!elt ||
199 _PyObject_GC_MAY_BE_TRACKED(elt))
200 return;
201 }
202 #ifdef SHOW_TRACK_COUNT
203 count_tracked--;
204 count_untracked++;
205 #endif
206 _PyObject_GC_UNTRACK(op);
207 }
208
209 PyObject *
PyTuple_Pack(Py_ssize_t n,...)210 PyTuple_Pack(Py_ssize_t n, ...)
211 {
212 Py_ssize_t i;
213 PyObject *o;
214 PyObject *result;
215 PyObject **items;
216 va_list vargs;
217
218 va_start(vargs, n);
219 result = PyTuple_New(n);
220 if (result == NULL) {
221 va_end(vargs);
222 return NULL;
223 }
224 items = ((PyTupleObject *)result)->ob_item;
225 for (i = 0; i < n; i++) {
226 o = va_arg(vargs, PyObject *);
227 Py_INCREF(o);
228 items[i] = o;
229 }
230 va_end(vargs);
231 return result;
232 }
233
234
235 /* Methods */
236
/* Destructor: release all items, then either park the tuple on the
   free list for its size or free its memory.  The trashcan mechanism
   bounds C-stack depth when deallocating deeply nested tuples. */
static void
tupledealloc(PyTupleObject *op)
{
    Py_ssize_t i;
    Py_ssize_t len = Py_SIZE(op);
    PyObject_GC_UnTrack(op);
    Py_TRASHCAN_BEGIN(op, tupledealloc)
    if (len > 0) {
        i = len;
        while (--i >= 0)
            Py_XDECREF(op->ob_item[i]);
#if PyTuple_MAXSAVESIZE > 0
        /* Recycle exact tuples of a cached size: chain onto the
           per-size free list through ob_item[0] instead of freeing. */
        if (len < PyTuple_MAXSAVESIZE &&
            numfree[len] < PyTuple_MAXFREELIST &&
            Py_TYPE(op) == &PyTuple_Type)
        {
            op->ob_item[0] = (PyObject *) free_list[len];
            numfree[len]++;
            free_list[len] = op;
            goto done; /* return */
        }
#endif
    }
    Py_TYPE(op)->tp_free((PyObject *)op);
done:
    Py_TRASHCAN_END
}
264
/* repr(tuple): "(item0, item1, ...)", with a trailing comma for a
   1-tuple so the result is a valid tuple display.  Py_ReprEnter /
   Py_ReprLeave render self-referential tuples as "(...)" instead of
   recursing forever. */
static PyObject *
tuplerepr(PyTupleObject *v)
{
    Py_ssize_t i, n;
    _PyUnicodeWriter writer;

    n = Py_SIZE(v);
    if (n == 0)
        return PyUnicode_FromString("()");

    /* While not mutable, it is still possible to end up with a cycle in a
       tuple through an object that stores itself within a tuple (and thus
       infinitely asks for the repr of itself). This should only be
       possible within a type. */
    i = Py_ReprEnter((PyObject *)v);
    if (i != 0) {
        return i > 0 ? PyUnicode_FromString("(...)") : NULL;
    }

    _PyUnicodeWriter_Init(&writer);
    writer.overallocate = 1;
    if (Py_SIZE(v) > 1) {
        /* "(" + "1" + ", 2" * (len - 1) + ")" */
        writer.min_length = 1 + 1 + (2 + 1) * (Py_SIZE(v) - 1) + 1;
    }
    else {
        /* "(1,)" */
        writer.min_length = 4;
    }

    if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0)
        goto error;

    /* Do repr() on each element. */
    for (i = 0; i < n; ++i) {
        PyObject *s;

        if (i > 0) {
            if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0)
                goto error;
        }

        s = PyObject_Repr(v->ob_item[i]);
        if (s == NULL)
            goto error;

        if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
            Py_DECREF(s);
            goto error;
        }
        Py_DECREF(s);
    }

    /* Final size is now known exactly; stop over-allocating. */
    writer.overallocate = 0;
    if (n > 1) {
        if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0)
            goto error;
    }
    else {
        if (_PyUnicodeWriter_WriteASCIIString(&writer, ",)", 2) < 0)
            goto error;
    }

    Py_ReprLeave((PyObject *)v);
    return _PyUnicodeWriter_Finish(&writer);

error:
    _PyUnicodeWriter_Dealloc(&writer);
    Py_ReprLeave((PyObject *)v);
    return NULL;
}
336
337
338 /* Hash for tuples. This is a slightly simplified version of the xxHash
339 non-cryptographic hash:
340 - we do not use any parallellism, there is only 1 accumulator.
341 - we drop the final mixing since this is just a permutation of the
342 output space: it does not help against collisions.
343 - at the end, we mangle the length with a single constant.
344 For the xxHash specification, see
345 https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md
346
347 Below are the official constants from the xxHash specification. Optimizing
348 compilers should emit a single "rotate" instruction for the
349 _PyHASH_XXROTATE() expansion. If that doesn't happen for some important
350 platform, the macro could be changed to expand to a platform-specific rotate
351 spelling instead.
352 */
353 #if SIZEOF_PY_UHASH_T > 4
354 #define _PyHASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL)
355 #define _PyHASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL)
356 #define _PyHASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL)
357 #define _PyHASH_XXROTATE(x) ((x << 31) | (x >> 33)) /* Rotate left 31 bits */
358 #else
359 #define _PyHASH_XXPRIME_1 ((Py_uhash_t)2654435761UL)
360 #define _PyHASH_XXPRIME_2 ((Py_uhash_t)2246822519UL)
361 #define _PyHASH_XXPRIME_5 ((Py_uhash_t)374761393UL)
362 #define _PyHASH_XXROTATE(x) ((x << 13) | (x >> 19)) /* Rotate left 13 bits */
363 #endif
364
365 /* Tests have shown that it's not worth to cache the hash value, see
366 https://bugs.python.org/issue9685 */
/* Tests have shown that it's not worth to cache the hash value, see
   https://bugs.python.org/issue9685 */
static Py_hash_t
tuplehash(PyTupleObject *v)
{
    Py_ssize_t i, len = Py_SIZE(v);
    PyObject **item = v->ob_item;

    /* xxHash-style accumulation over the item hashes; see the constant
       definitions and the comment block above. */
    Py_uhash_t acc = _PyHASH_XXPRIME_5;
    for (i = 0; i < len; i++) {
        Py_uhash_t lane = PyObject_Hash(item[i]);
        /* (Py_uhash_t)-1 is the error return of PyObject_Hash;
           propagate it. */
        if (lane == (Py_uhash_t)-1) {
            return -1;
        }
        acc += lane * _PyHASH_XXPRIME_2;
        acc = _PyHASH_XXROTATE(acc);
        acc *= _PyHASH_XXPRIME_1;
    }

    /* Add input length, mangled to keep the historical value of hash(()). */
    acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL);

    /* -1 is reserved as the error indicator, so remap it. */
    if (acc == (Py_uhash_t)-1) {
        return 1546275796;
    }
    return acc;
}
392
393 static Py_ssize_t
tuplelength(PyTupleObject * a)394 tuplelength(PyTupleObject *a)
395 {
396 return Py_SIZE(a);
397 }
398
399 static int
tuplecontains(PyTupleObject * a,PyObject * el)400 tuplecontains(PyTupleObject *a, PyObject *el)
401 {
402 Py_ssize_t i;
403 int cmp;
404
405 for (i = 0, cmp = 0 ; cmp == 0 && i < Py_SIZE(a); ++i)
406 cmp = PyObject_RichCompareBool(el, PyTuple_GET_ITEM(a, i),
407 Py_EQ);
408 return cmp;
409 }
410
411 static PyObject *
tupleitem(PyTupleObject * a,Py_ssize_t i)412 tupleitem(PyTupleObject *a, Py_ssize_t i)
413 {
414 if (i < 0 || i >= Py_SIZE(a)) {
415 PyErr_SetString(PyExc_IndexError, "tuple index out of range");
416 return NULL;
417 }
418 Py_INCREF(a->ob_item[i]);
419 return a->ob_item[i];
420 }
421
422 PyObject *
_PyTuple_FromArray(PyObject * const * src,Py_ssize_t n)423 _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n)
424 {
425 PyTupleObject *tuple = (PyTupleObject *)PyTuple_New(n);
426 if (tuple == NULL) {
427 return NULL;
428 }
429 PyObject **dst = tuple->ob_item;
430 for (Py_ssize_t i = 0; i < n; i++) {
431 PyObject *item = src[i];
432 Py_INCREF(item);
433 dst[i] = item;
434 }
435 return (PyObject *)tuple;
436 }
437
438 static PyObject *
tupleslice(PyTupleObject * a,Py_ssize_t ilow,Py_ssize_t ihigh)439 tupleslice(PyTupleObject *a, Py_ssize_t ilow,
440 Py_ssize_t ihigh)
441 {
442 if (ilow < 0)
443 ilow = 0;
444 if (ihigh > Py_SIZE(a))
445 ihigh = Py_SIZE(a);
446 if (ihigh < ilow)
447 ihigh = ilow;
448 if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
449 Py_INCREF(a);
450 return (PyObject *)a;
451 }
452 return _PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow);
453 }
454
455 PyObject *
PyTuple_GetSlice(PyObject * op,Py_ssize_t i,Py_ssize_t j)456 PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
457 {
458 if (op == NULL || !PyTuple_Check(op)) {
459 PyErr_BadInternalCall();
460 return NULL;
461 }
462 return tupleslice((PyTupleObject *)op, i, j);
463 }
464
465 static PyObject *
tupleconcat(PyTupleObject * a,PyObject * bb)466 tupleconcat(PyTupleObject *a, PyObject *bb)
467 {
468 Py_ssize_t size;
469 Py_ssize_t i;
470 PyObject **src, **dest;
471 PyTupleObject *np;
472 if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
473 Py_INCREF(bb);
474 return bb;
475 }
476 if (!PyTuple_Check(bb)) {
477 PyErr_Format(PyExc_TypeError,
478 "can only concatenate tuple (not \"%.200s\") to tuple",
479 Py_TYPE(bb)->tp_name);
480 return NULL;
481 }
482 #define b ((PyTupleObject *)bb)
483 if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
484 Py_INCREF(a);
485 return (PyObject *)a;
486 }
487 if (Py_SIZE(a) > PY_SSIZE_T_MAX - Py_SIZE(b))
488 return PyErr_NoMemory();
489 size = Py_SIZE(a) + Py_SIZE(b);
490 np = (PyTupleObject *) PyTuple_New(size);
491 if (np == NULL) {
492 return NULL;
493 }
494 src = a->ob_item;
495 dest = np->ob_item;
496 for (i = 0; i < Py_SIZE(a); i++) {
497 PyObject *v = src[i];
498 Py_INCREF(v);
499 dest[i] = v;
500 }
501 src = b->ob_item;
502 dest = np->ob_item + Py_SIZE(a);
503 for (i = 0; i < Py_SIZE(b); i++) {
504 PyObject *v = src[i];
505 Py_INCREF(v);
506 dest[i] = v;
507 }
508 return (PyObject *)np;
509 #undef b
510 }
511
512 static PyObject *
tuplerepeat(PyTupleObject * a,Py_ssize_t n)513 tuplerepeat(PyTupleObject *a, Py_ssize_t n)
514 {
515 Py_ssize_t i, j;
516 Py_ssize_t size;
517 PyTupleObject *np;
518 PyObject **p, **items;
519 if (n < 0)
520 n = 0;
521 if (Py_SIZE(a) == 0 || n == 1) {
522 if (PyTuple_CheckExact(a)) {
523 /* Since tuples are immutable, we can return a shared
524 copy in this case */
525 Py_INCREF(a);
526 return (PyObject *)a;
527 }
528 if (Py_SIZE(a) == 0)
529 return PyTuple_New(0);
530 }
531 if (n > PY_SSIZE_T_MAX / Py_SIZE(a))
532 return PyErr_NoMemory();
533 size = Py_SIZE(a) * n;
534 np = (PyTupleObject *) PyTuple_New(size);
535 if (np == NULL)
536 return NULL;
537 p = np->ob_item;
538 items = a->ob_item;
539 for (i = 0; i < n; i++) {
540 for (j = 0; j < Py_SIZE(a); j++) {
541 *p = items[j];
542 Py_INCREF(*p);
543 p++;
544 }
545 }
546 return (PyObject *) np;
547 }
548
549 /*[clinic input]
550 tuple.index
551
552 value: object
553 start: slice_index(accept={int}) = 0
554 stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
555 /
556
557 Return first index of value.
558
559 Raises ValueError if the value is not present.
560 [clinic start generated code]*/
561
562 static PyObject *
tuple_index_impl(PyTupleObject * self,PyObject * value,Py_ssize_t start,Py_ssize_t stop)563 tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
564 Py_ssize_t stop)
565 /*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
566 {
567 Py_ssize_t i;
568
569 if (start < 0) {
570 start += Py_SIZE(self);
571 if (start < 0)
572 start = 0;
573 }
574 if (stop < 0) {
575 stop += Py_SIZE(self);
576 }
577 else if (stop > Py_SIZE(self)) {
578 stop = Py_SIZE(self);
579 }
580 for (i = start; i < stop; i++) {
581 int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
582 if (cmp > 0)
583 return PyLong_FromSsize_t(i);
584 else if (cmp < 0)
585 return NULL;
586 }
587 PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
588 return NULL;
589 }
590
591 /*[clinic input]
592 tuple.count
593
594 value: object
595 /
596
597 Return number of occurrences of value.
598 [clinic start generated code]*/
599
600 static PyObject *
tuple_count(PyTupleObject * self,PyObject * value)601 tuple_count(PyTupleObject *self, PyObject *value)
602 /*[clinic end generated code: output=aa927affc5a97605 input=531721aff65bd772]*/
603 {
604 Py_ssize_t count = 0;
605 Py_ssize_t i;
606
607 for (i = 0; i < Py_SIZE(self); i++) {
608 int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
609 if (cmp > 0)
610 count++;
611 else if (cmp < 0)
612 return NULL;
613 }
614 return PyLong_FromSsize_t(count);
615 }
616
617 static int
tupletraverse(PyTupleObject * o,visitproc visit,void * arg)618 tupletraverse(PyTupleObject *o, visitproc visit, void *arg)
619 {
620 Py_ssize_t i;
621
622 for (i = Py_SIZE(o); --i >= 0; )
623 Py_VISIT(o->ob_item[i]);
624 return 0;
625 }
626
/* tp_richcompare: lexicographic comparison of two tuples.  Find the
   first differing index; equal prefixes fall back to comparing
   lengths, and a differing pair decides EQ/NE directly or is
   re-compared with the requested operator. */
static PyObject *
tuplerichcompare(PyObject *v, PyObject *w, int op)
{
    PyTupleObject *vt, *wt;
    Py_ssize_t i;
    Py_ssize_t vlen, wlen;

    if (!PyTuple_Check(v) || !PyTuple_Check(w))
        Py_RETURN_NOTIMPLEMENTED;

    vt = (PyTupleObject *)v;
    wt = (PyTupleObject *)w;

    vlen = Py_SIZE(vt);
    wlen = Py_SIZE(wt);

    /* Note: the corresponding code for lists has an "early out" test
     * here when op is EQ or NE and the lengths differ. That pays there,
     * but Tim was unable to find any real code where EQ/NE tuple
     * compares don't have the same length, so testing for it here would
     * have cost without benefit.
     */

    /* Search for the first index where items are different.
     * Note that because tuples are immutable, it's safe to reuse
     * vlen and wlen across the comparison calls.
     */
    for (i = 0; i < vlen && i < wlen; i++) {
        int k = PyObject_RichCompareBool(vt->ob_item[i],
                                         wt->ob_item[i], Py_EQ);
        if (k < 0)
            return NULL;
        if (!k)
            break;
    }

    if (i >= vlen || i >= wlen) {
        /* No more items to compare -- compare sizes */
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
    }

    /* We have an item that differs -- shortcuts for EQ/NE */
    if (op == Py_EQ) {
        Py_RETURN_FALSE;
    }
    if (op == Py_NE) {
        Py_RETURN_TRUE;
    }

    /* Compare the final item again using the proper operator */
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
}
679
680 static PyObject *
681 tuple_subtype_new(PyTypeObject *type, PyObject *iterable);
682
683 /*[clinic input]
684 @classmethod
685 tuple.__new__ as tuple_new
686 iterable: object(c_default="NULL") = ()
687 /
688
689 Built-in immutable sequence.
690
691 If no argument is given, the constructor returns an empty tuple.
692 If iterable is specified the tuple is initialized from iterable's items.
693
694 If the argument is a tuple, the return value is the same object.
695 [clinic start generated code]*/
696
697 static PyObject *
tuple_new_impl(PyTypeObject * type,PyObject * iterable)698 tuple_new_impl(PyTypeObject *type, PyObject *iterable)
699 /*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
700 {
701 if (type != &PyTuple_Type)
702 return tuple_subtype_new(type, iterable);
703
704 if (iterable == NULL)
705 return PyTuple_New(0);
706 else
707 return PySequence_Tuple(iterable);
708 }
709
710 static PyObject *
tuple_subtype_new(PyTypeObject * type,PyObject * iterable)711 tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
712 {
713 PyObject *tmp, *newobj, *item;
714 Py_ssize_t i, n;
715
716 assert(PyType_IsSubtype(type, &PyTuple_Type));
717 tmp = tuple_new_impl(&PyTuple_Type, iterable);
718 if (tmp == NULL)
719 return NULL;
720 assert(PyTuple_Check(tmp));
721 newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
722 if (newobj == NULL)
723 return NULL;
724 for (i = 0; i < n; i++) {
725 item = PyTuple_GET_ITEM(tmp, i);
726 Py_INCREF(item);
727 PyTuple_SET_ITEM(newobj, i, item);
728 }
729 Py_DECREF(tmp);
730 return newobj;
731 }
732
/* Sequence protocol table: length, concat, repeat, item access and
   membership.  The assignment slots are 0 because tuples are
   immutable. */
static PySequenceMethods tuple_as_sequence = {
    (lenfunc)tuplelength, /* sq_length */
    (binaryfunc)tupleconcat, /* sq_concat */
    (ssizeargfunc)tuplerepeat, /* sq_repeat */
    (ssizeargfunc)tupleitem, /* sq_item */
    0, /* sq_slice */
    0, /* sq_ass_item */
    0, /* sq_ass_slice */
    (objobjproc)tuplecontains, /* sq_contains */
};
743
/* mp_subscript slot: handles t[i] (with negative-index normalization)
   and t[i:j:k] slicing.  Whole-tuple step-1 slices of exact tuples are
   returned shared rather than copied. */
static PyObject*
tuplesubscript(PyTupleObject* self, PyObject* item)
{
    if (PyIndex_Check(item)) {
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
        if (i == -1 && PyErr_Occurred())
            return NULL;
        if (i < 0)
            i += PyTuple_GET_SIZE(self);
        return tupleitem(self, i);
    }
    else if (PySlice_Check(item)) {
        Py_ssize_t start, stop, step, slicelength, i;
        size_t cur;
        PyObject* result;
        PyObject* it;
        PyObject **src, **dest;

        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
            return NULL;
        }
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
                                            &stop, step);

        if (slicelength <= 0) {
            return PyTuple_New(0);
        }
        else if (start == 0 && step == 1 &&
                 slicelength == PyTuple_GET_SIZE(self) &&
                 PyTuple_CheckExact(self)) {
            /* Identity slice of an exact tuple: share the original. */
            Py_INCREF(self);
            return (PyObject *)self;
        }
        else {
            result = PyTuple_New(slicelength);
            if (!result) return NULL;

            /* Gather every step-th item, taking a new reference. */
            src = self->ob_item;
            dest = ((PyTupleObject *)result)->ob_item;
            for (cur = start, i = 0; i < slicelength;
                 cur += step, i++) {
                it = src[cur];
                Py_INCREF(it);
                dest[i] = it;
            }

            return result;
        }
    }
    else {
        PyErr_Format(PyExc_TypeError,
                     "tuple indices must be integers or slices, not %.200s",
                     Py_TYPE(item)->tp_name);
        return NULL;
    }
}
800
801 /*[clinic input]
802 tuple.__getnewargs__
803 [clinic start generated code]*/
804
805 static PyObject *
tuple___getnewargs___impl(PyTupleObject * self)806 tuple___getnewargs___impl(PyTupleObject *self)
807 /*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
808 {
809 return Py_BuildValue("(N)", tupleslice(self, 0, Py_SIZE(self)));
810 }
811
/* Methods beyond the sequence protocol: the pickling helper plus
   index() and count() (bodies generated by Argument Clinic). */
static PyMethodDef tuple_methods[] = {
    TUPLE___GETNEWARGS___METHODDEF
    TUPLE_INDEX_METHODDEF
    TUPLE_COUNT_METHODDEF
    {NULL, NULL} /* sentinel */
};

/* Mapping protocol: supports t[index] and t[slice] via tuplesubscript;
   the assignment slot is 0 because tuples are immutable. */
static PyMappingMethods tuple_as_mapping = {
    (lenfunc)tuplelength,
    (binaryfunc)tuplesubscript,
    0
};

/* Forward declaration; defined after PyTupleIter_Type below. */
static PyObject *tuple_iter(PyObject *seq);
826
/* The tuple type object.  tp_basicsize subtracts one PyObject* because
   PyTupleObject declares a one-element ob_item array; the items are
   accounted for via tp_itemsize instead. */
PyTypeObject PyTuple_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple",                                    /* tp_name */
    sizeof(PyTupleObject) - sizeof(PyObject *), /* tp_basicsize */
    sizeof(PyObject *),                         /* tp_itemsize */
    (destructor)tupledealloc, /* tp_dealloc */
    0, /* tp_vectorcall_offset */
    0, /* tp_getattr */
    0, /* tp_setattr */
    0, /* tp_as_async */
    (reprfunc)tuplerepr, /* tp_repr */
    0, /* tp_as_number */
    &tuple_as_sequence, /* tp_as_sequence */
    &tuple_as_mapping, /* tp_as_mapping */
    (hashfunc)tuplehash, /* tp_hash */
    0, /* tp_call */
    0, /* tp_str */
    PyObject_GenericGetAttr, /* tp_getattro */
    0, /* tp_setattro */
    0, /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS, /* tp_flags */
    tuple_new__doc__, /* tp_doc */
    (traverseproc)tupletraverse, /* tp_traverse */
    0, /* tp_clear */
    tuplerichcompare, /* tp_richcompare */
    0, /* tp_weaklistoffset */
    tuple_iter, /* tp_iter */
    0, /* tp_iternext */
    tuple_methods, /* tp_methods */
    0, /* tp_members */
    0, /* tp_getset */
    0, /* tp_base */
    0, /* tp_dict */
    0, /* tp_descr_get */
    0, /* tp_descr_set */
    0, /* tp_dictoffset */
    0, /* tp_init */
    0, /* tp_alloc */
    tuple_new, /* tp_new */
    PyObject_GC_Del, /* tp_free */
};
869
870 /* The following function breaks the notion that tuples are immutable:
871 it changes the size of a tuple. We get away with this only if there
872 is only one module referencing the object. You can also think of it
873 as creating a new tuple object and destroying the old one, only more
874 efficiently. In any case, don't use this if the tuple may already be
875 known to some other part of the code. */
876
int
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
{
    PyTupleObject *v;
    PyTupleObject *sv;
    Py_ssize_t i;
    Py_ssize_t oldsize;

    v = (PyTupleObject *) *pv;
    /* Only an exact tuple with a single reference may be resized in
       place; otherwise *pv is cleared and -1 returned. */
    if (v == NULL || Py_TYPE(v) != &PyTuple_Type ||
        (Py_SIZE(v) != 0 && Py_REFCNT(v) != 1)) {
        *pv = 0;
        Py_XDECREF(v);
        PyErr_BadInternalCall();
        return -1;
    }
    oldsize = Py_SIZE(v);
    if (oldsize == newsize)
        return 0;

    if (oldsize == 0) {
        /* Empty tuples are often shared, so we should never
           resize them in-place even if we do own the only
           (current) reference */
        Py_DECREF(v);
        *pv = PyTuple_New(newsize);
        return *pv == NULL ? -1 : 0;
    }

    /* Temporarily take the object out of refcount bookkeeping and GC
       tracking while realloc may move it. */
    /* XXX UNREF/NEWREF interface should be more symmetrical */
    _Py_DEC_REFTOTAL;
    if (_PyObject_GC_IS_TRACKED(v))
        _PyObject_GC_UNTRACK(v);
    _Py_ForgetReference((PyObject *) v);
    /* DECREF items deleted by shrinkage */
    for (i = newsize; i < oldsize; i++) {
        Py_CLEAR(v->ob_item[i]);
    }
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
    if (sv == NULL) {
        *pv = NULL;
        PyObject_GC_Del(v);
        return -1;
    }
    _Py_NewReference((PyObject *) sv);
    /* Zero out items added by growing */
    if (newsize > oldsize)
        memset(&sv->ob_item[oldsize], 0,
               sizeof(*sv->ob_item) * (newsize - oldsize));
    *pv = (PyObject *) sv;
    _PyObject_GC_TRACK(sv);
    return 0;
}
930
int
PyTuple_ClearFreeList(void)
{
    /* Release every tuple held on the per-size free lists and report
       how many were freed.  Entry 0 (the empty-tuple singleton) is left
       alone. */
    int freed = 0;
#if PyTuple_MAXSAVESIZE > 0
    for (int len = 1; len < PyTuple_MAXSAVESIZE; len++) {
        PyTupleObject *p = free_list[len];
        freed += numfree[len];
        free_list[len] = NULL;
        numfree[len] = 0;
        while (p != NULL) {
            /* Free-listed tuples chain through ob_item[0]. */
            PyTupleObject *next = (PyTupleObject *)(p->ob_item[0]);
            PyObject_GC_Del(p);
            p = next;
        }
    }
#endif
    return freed;
}
952
/* Interpreter-shutdown hook: drop the empty-tuple singleton, empty the
   free lists, and (if enabled) print tracking statistics. */
void
PyTuple_Fini(void)
{
#if PyTuple_MAXSAVESIZE > 0
    /* empty tuples are used all over the place and applications may
     * rely on the fact that an empty tuple is a singleton. */
    Py_CLEAR(free_list[0]);

    (void)PyTuple_ClearFreeList();
#endif
#ifdef SHOW_TRACK_COUNT
    show_track();
#endif
}
967
968 /*********************** Tuple Iterator **************************/
969
/* State of a tuple iterator: the next index to yield plus a strong
   reference to the tuple being iterated. */
typedef struct {
    PyObject_HEAD
    Py_ssize_t it_index;   /* next index to yield */
    PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
} tupleiterobject;
975
/* Destructor: untrack from the GC, release the reference to the
   underlying tuple (may already be NULL), and free the iterator. */
static void
tupleiter_dealloc(tupleiterobject *it)
{
    _PyObject_GC_UNTRACK(it);
    Py_XDECREF(it->it_seq);
    PyObject_GC_Del(it);
}
983
/* GC traversal: the iterator's only reference is the tuple itself. */
static int
tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg)
{
    Py_VISIT(it->it_seq);
    return 0;
}
990
991 static PyObject *
tupleiter_next(tupleiterobject * it)992 tupleiter_next(tupleiterobject *it)
993 {
994 PyTupleObject *seq;
995 PyObject *item;
996
997 assert(it != NULL);
998 seq = it->it_seq;
999 if (seq == NULL)
1000 return NULL;
1001 assert(PyTuple_Check(seq));
1002
1003 if (it->it_index < PyTuple_GET_SIZE(seq)) {
1004 item = PyTuple_GET_ITEM(seq, it->it_index);
1005 ++it->it_index;
1006 Py_INCREF(item);
1007 return item;
1008 }
1009
1010 it->it_seq = NULL;
1011 Py_DECREF(seq);
1012 return NULL;
1013 }
1014
1015 static PyObject *
tupleiter_len(tupleiterobject * it,PyObject * Py_UNUSED (ignored))1016 tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
1017 {
1018 Py_ssize_t len = 0;
1019 if (it->it_seq)
1020 len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
1021 return PyLong_FromSsize_t(len);
1022 }
1023
1024 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
1025
/* __reduce__ for pickling: reduce to (iter, (seq,), index) while
   active, or (iter, ((),)) once exhausted. */
static PyObject *
tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
{
    _Py_IDENTIFIER(iter);
    if (it->it_seq)
        return Py_BuildValue("N(O)n", _PyEval_GetBuiltinId(&PyId_iter),
                             it->it_seq, it->it_index);
    else
        return Py_BuildValue("N(())", _PyEval_GetBuiltinId(&PyId_iter));
}
1036
1037 static PyObject *
tupleiter_setstate(tupleiterobject * it,PyObject * state)1038 tupleiter_setstate(tupleiterobject *it, PyObject *state)
1039 {
1040 Py_ssize_t index = PyLong_AsSsize_t(state);
1041 if (index == -1 && PyErr_Occurred())
1042 return NULL;
1043 if (it->it_seq != NULL) {
1044 if (index < 0)
1045 index = 0;
1046 else if (index > PyTuple_GET_SIZE(it->it_seq))
1047 index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
1048 it->it_index = index;
1049 }
1050 Py_RETURN_NONE;
1051 }
1052
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");

/* Iterator methods: length hint plus pickle support. */
static PyMethodDef tupleiter_methods[] = {
    {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
    {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
    {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
    {NULL, NULL} /* sentinel */
};
1062
/* Type object for tuple iterators; instances are created by
   tuple_iter() below. */
PyTypeObject PyTupleIter_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple_iterator", /* tp_name */
    sizeof(tupleiterobject), /* tp_basicsize */
    0, /* tp_itemsize */
    /* methods */
    (destructor)tupleiter_dealloc, /* tp_dealloc */
    0, /* tp_vectorcall_offset */
    0, /* tp_getattr */
    0, /* tp_setattr */
    0, /* tp_as_async */
    0, /* tp_repr */
    0, /* tp_as_number */
    0, /* tp_as_sequence */
    0, /* tp_as_mapping */
    0, /* tp_hash */
    0, /* tp_call */
    0, /* tp_str */
    PyObject_GenericGetAttr, /* tp_getattro */
    0, /* tp_setattro */
    0, /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
    0, /* tp_doc */
    (traverseproc)tupleiter_traverse, /* tp_traverse */
    0, /* tp_clear */
    0, /* tp_richcompare */
    0, /* tp_weaklistoffset */
    PyObject_SelfIter, /* tp_iter */
    (iternextfunc)tupleiter_next, /* tp_iternext */
    tupleiter_methods, /* tp_methods */
    0, /* tp_members */
};
1095
1096 static PyObject *
tuple_iter(PyObject * seq)1097 tuple_iter(PyObject *seq)
1098 {
1099 tupleiterobject *it;
1100
1101 if (!PyTuple_Check(seq)) {
1102 PyErr_BadInternalCall();
1103 return NULL;
1104 }
1105 it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
1106 if (it == NULL)
1107 return NULL;
1108 it->it_index = 0;
1109 Py_INCREF(seq);
1110 it->it_seq = (PyTupleObject *)seq;
1111 _PyObject_GC_TRACK(it);
1112 return (PyObject *)it;
1113 }
1114