/* Tuple object implementation */

#include "Python.h"
#include "pycore_abstract.h"      // _PyIndex_Check()
#include "pycore_gc.h"            // _PyObject_GC_IS_TRACKED()
#include "pycore_initconfig.h"    // _PyStatus_OK()
#include "pycore_object.h"        // _PyObject_GC_TRACK()

/*[clinic input]
class tuple "PyTupleObject *" "&PyTuple_Type"
[clinic start generated code]*/
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=f051ba3cfdf9a189]*/

#include "clinic/tupleobject.c.h"

#if PyTuple_MAXSAVESIZE > 0
static struct _Py_tuple_state *
get_tuple_state(void)
{
    PyInterpreterState *interp = _PyInterpreterState_GET();
    return &interp->tuple;
}
#endif


static inline void
tuple_gc_track(PyTupleObject *op)
{
    _PyObject_GC_TRACK(op);
}


/* Print summary info about the state of the optimized allocator */
void
_PyTuple_DebugMallocStats(FILE *out)
{
#if PyTuple_MAXSAVESIZE > 0
    struct _Py_tuple_state *state = get_tuple_state();
    for (int i = 1; i < PyTuple_MAXSAVESIZE; i++) {
        char buf[128];
        PyOS_snprintf(buf, sizeof(buf),
                      "free %d-sized PyTupleObject", i);
        _PyDebugAllocatorStats(out, buf, state->numfree[i],
                               _PyObject_VAR_SIZE(&PyTuple_Type, i));
    }
#endif
}

/* Allocate an uninitialized tuple object. Before making it public, the
   following steps must be done:
   - initialize its items
   - call tuple_gc_track() on it
   Because the empty tuple is always reused and is already tracked by the GC,
   this function must not be called with size == 0 (unless from PyTuple_New(),
   which wraps this function).
*/
static PyTupleObject *
tuple_alloc(Py_ssize_t size)
{
    PyTupleObject *op;
#if PyTuple_MAXSAVESIZE > 0
    // If Python is built with the empty tuple singleton,
    // tuple_alloc(0) must not be called.
    assert(size != 0);
#endif
    if (size < 0) {
        PyErr_BadInternalCall();
        return NULL;
    }

#if PyTuple_MAXSAVESIZE > 0
    struct _Py_tuple_state *state = get_tuple_state();
#ifdef Py_DEBUG
    // tuple_alloc() must not be called after _PyTuple_Fini()
    assert(state->numfree[0] != -1);
#endif
    if (size < PyTuple_MAXSAVESIZE && (op = state->free_list[size]) != NULL) {
        assert(size != 0);
        state->free_list[size] = (PyTupleObject *) op->ob_item[0];
        state->numfree[size]--;
        /* Inlined _PyObject_InitVar() without _PyType_HasFeature() test */
#ifdef Py_TRACE_REFS
        Py_SET_SIZE(op, size);
        Py_SET_TYPE(op, &PyTuple_Type);
#endif
        _Py_NewReference((PyObject *)op);
    }
    else
#endif
    {
        /* Check for overflow: the allocation needs roughly
           sizeof(PyTupleObject) + (size - 1) * sizeof(PyObject *) bytes,
           which must not exceed PY_SSIZE_T_MAX. */
        if ((size_t)size > ((size_t)PY_SSIZE_T_MAX - (sizeof(PyTupleObject) -
                    sizeof(PyObject *))) / sizeof(PyObject *)) {
            return (PyTupleObject *)PyErr_NoMemory();
        }
        op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, size);
        if (op == NULL)
            return NULL;
    }
    return op;
}
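
/* A minimal usage sketch of the contract documented above tuple_alloc()
   (hypothetical helper, not part of this file's API): allocate, initialize
   every item slot, and only then start GC tracking -- the same pattern
   PyTuple_Pack() and _PyTuple_FromArray() follow below. */
#if 0
static PyObject *
tuple_from_pair(PyObject *a, PyObject *b)
{
    PyTupleObject *op = tuple_alloc(2);
    if (op == NULL) {
        return NULL;
    }
    Py_INCREF(a);
    op->ob_item[0] = a;
    Py_INCREF(b);
    op->ob_item[1] = b;
    tuple_gc_track(op);     /* track only after every item is set */
    return (PyObject *)op;
}
#endif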

static int
tuple_create_empty_tuple_singleton(struct _Py_tuple_state *state)
{
#if PyTuple_MAXSAVESIZE > 0
    assert(state->free_list[0] == NULL);

    PyTupleObject *op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, 0);
    if (op == NULL) {
        return -1;
    }
    // The empty tuple singleton is not tracked by the GC.
    // It does not contain any Python object.

    state->free_list[0] = op;
    state->numfree[0]++;

    assert(state->numfree[0] == 1);
#endif
    return 0;
}

static PyObject *
tuple_get_empty(void)
{
#if PyTuple_MAXSAVESIZE > 0
    struct _Py_tuple_state *state = get_tuple_state();
    PyTupleObject *op = state->free_list[0];
    // tuple_get_empty() must not be called before _PyTuple_Init()
    // or after _PyTuple_Fini()
    assert(op != NULL);
#ifdef Py_DEBUG
    assert(state->numfree[0] != -1);
#endif

    Py_INCREF(op);
    return (PyObject *) op;
#else
    return PyTuple_New(0);
#endif
}

PyObject *
PyTuple_New(Py_ssize_t size)
{
    PyTupleObject *op;
#if PyTuple_MAXSAVESIZE > 0
    if (size == 0) {
        return tuple_get_empty();
    }
#endif
    op = tuple_alloc(size);
    if (op == NULL) {
        return NULL;
    }
    for (Py_ssize_t i = 0; i < size; i++) {
        op->ob_item[i] = NULL;
    }
    tuple_gc_track(op);
    return (PyObject *) op;
}

Py_ssize_t
PyTuple_Size(PyObject *op)
{
    if (!PyTuple_Check(op)) {
        PyErr_BadInternalCall();
        return -1;
    }
    else
        return Py_SIZE(op);
}

PyObject *
PyTuple_GetItem(PyObject *op, Py_ssize_t i)
{
    if (!PyTuple_Check(op)) {
        PyErr_BadInternalCall();
        return NULL;
    }
    if (i < 0 || i >= Py_SIZE(op)) {
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
        return NULL;
    }
    return ((PyTupleObject *)op)->ob_item[i];
}

int
PyTuple_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem)
{
    PyObject **p;
    if (!PyTuple_Check(op) || Py_REFCNT(op) != 1) {
        Py_XDECREF(newitem);
        PyErr_BadInternalCall();
        return -1;
    }
    if (i < 0 || i >= Py_SIZE(op)) {
        Py_XDECREF(newitem);
        PyErr_SetString(PyExc_IndexError,
                        "tuple assignment index out of range");
        return -1;
    }
    p = ((PyTupleObject *)op)->ob_item + i;
    Py_XSETREF(*p, newitem);
    return 0;
}

void
_PyTuple_MaybeUntrack(PyObject *op)
{
    PyTupleObject *t;
    Py_ssize_t i, n;

    if (!PyTuple_CheckExact(op) || !_PyObject_GC_IS_TRACKED(op))
        return;
    t = (PyTupleObject *) op;
    n = Py_SIZE(t);
    for (i = 0; i < n; i++) {
        PyObject *elt = PyTuple_GET_ITEM(t, i);
        /* Tuples with NULL elements aren't fully constructed;
           don't untrack them yet. */
        if (!elt ||
            _PyObject_GC_MAY_BE_TRACKED(elt))
            return;
    }
    _PyObject_GC_UNTRACK(op);
}
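
/* For example, when the collector reaches a fully constructed tuple such as
   (1, "a", 2.0), no element can participate in a reference cycle, so the
   tuple is untracked here; a tuple like (1, [], 2) stays tracked because the
   list may be part of a cycle. */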

PyObject *
PyTuple_Pack(Py_ssize_t n, ...)
{
    Py_ssize_t i;
    PyObject *o;
    PyObject **items;
    va_list vargs;

    if (n == 0) {
        return tuple_get_empty();
    }

    va_start(vargs, n);
    PyTupleObject *result = tuple_alloc(n);
    if (result == NULL) {
        va_end(vargs);
        return NULL;
    }
    items = result->ob_item;
    for (i = 0; i < n; i++) {
        o = va_arg(vargs, PyObject *);
        Py_INCREF(o);
        items[i] = o;
    }
    va_end(vargs);
    tuple_gc_track(result);
    return (PyObject *)result;
}


/* Methods */

static void
tupledealloc(PyTupleObject *op)
{
    Py_ssize_t len = Py_SIZE(op);
    PyObject_GC_UnTrack(op);
    Py_TRASHCAN_BEGIN(op, tupledealloc)
    if (len > 0) {
        Py_ssize_t i = len;
        while (--i >= 0) {
            Py_XDECREF(op->ob_item[i]);
        }
#if PyTuple_MAXSAVESIZE > 0
        struct _Py_tuple_state *state = get_tuple_state();
#ifdef Py_DEBUG
        // tupledealloc() must not be called after _PyTuple_Fini()
        assert(state->numfree[0] != -1);
#endif
        if (len < PyTuple_MAXSAVESIZE
            && state->numfree[len] < PyTuple_MAXFREELIST
            && Py_IS_TYPE(op, &PyTuple_Type))
        {
            op->ob_item[0] = (PyObject *) state->free_list[len];
            state->numfree[len]++;
            state->free_list[len] = op;
            goto done; /* return */
        }
#endif
    }
    Py_TYPE(op)->tp_free((PyObject *)op);

#if PyTuple_MAXSAVESIZE > 0
done:
#endif
    Py_TRASHCAN_END
}
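
/* Note on the free list layout used above: a freed tuple's ob_item[0] slot is
   reused as the "next" link, so free_list[len] is a singly linked list of
   freed PyTupleObject of size len, threaded through their first item slots.
   tuple_alloc() pops from the head of that list. */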

static PyObject *
tuplerepr(PyTupleObject *v)
{
    Py_ssize_t i, n;
    _PyUnicodeWriter writer;

    n = Py_SIZE(v);
    if (n == 0)
        return PyUnicode_FromString("()");

    /* Although tuples are immutable, it is still possible to end up with a
       cycle in a tuple through an object that stores itself within a tuple
       (and thus infinitely asks for the repr of itself). This should only
       be possible within a type. */
    i = Py_ReprEnter((PyObject *)v);
    if (i != 0) {
        return i > 0 ? PyUnicode_FromString("(...)") : NULL;
    }

    _PyUnicodeWriter_Init(&writer);
    writer.overallocate = 1;
    if (Py_SIZE(v) > 1) {
        /* "(" + "1" + ", 2" * (len - 1) + ")" */
        writer.min_length = 1 + 1 + (2 + 1) * (Py_SIZE(v) - 1) + 1;
    }
    else {
        /* "(1,)" */
        writer.min_length = 4;
    }

    if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0)
        goto error;

    /* Do repr() on each element. */
    for (i = 0; i < n; ++i) {
        PyObject *s;

        if (i > 0) {
            if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0)
                goto error;
        }

        s = PyObject_Repr(v->ob_item[i]);
        if (s == NULL)
            goto error;

        if (_PyUnicodeWriter_WriteStr(&writer, s) < 0) {
            Py_DECREF(s);
            goto error;
        }
        Py_DECREF(s);
    }

    writer.overallocate = 0;
    if (n > 1) {
        if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0)
            goto error;
    }
    else {
        if (_PyUnicodeWriter_WriteASCIIString(&writer, ",)", 2) < 0)
            goto error;
    }

    Py_ReprLeave((PyObject *)v);
    return _PyUnicodeWriter_Finish(&writer);

error:
    _PyUnicodeWriter_Dealloc(&writer);
    Py_ReprLeave((PyObject *)v);
    return NULL;
}


/* Hash for tuples. This is a slightly simplified version of the xxHash
   non-cryptographic hash:
   - we do not use any parallelism, there is only 1 accumulator.
   - we drop the final mixing since this is just a permutation of the
     output space: it does not help against collisions.
   - at the end, we mangle the length with a single constant.
   For the xxHash specification, see
   https://github.com/Cyan4973/xxHash/blob/master/doc/xxhash_spec.md

   Below are the official constants from the xxHash specification. Optimizing
   compilers should emit a single "rotate" instruction for the
   _PyHASH_XXROTATE() expansion. If that doesn't happen for some important
   platform, the macro could be changed to expand to a platform-specific rotate
   spelling instead.
*/
#if SIZEOF_PY_UHASH_T > 4
#define _PyHASH_XXPRIME_1 ((Py_uhash_t)11400714785074694791ULL)
#define _PyHASH_XXPRIME_2 ((Py_uhash_t)14029467366897019727ULL)
#define _PyHASH_XXPRIME_5 ((Py_uhash_t)2870177450012600261ULL)
#define _PyHASH_XXROTATE(x) ((x << 31) | (x >> 33))  /* Rotate left 31 bits */
#else
#define _PyHASH_XXPRIME_1 ((Py_uhash_t)2654435761UL)
#define _PyHASH_XXPRIME_2 ((Py_uhash_t)2246822519UL)
#define _PyHASH_XXPRIME_5 ((Py_uhash_t)374761393UL)
#define _PyHASH_XXROTATE(x) ((x << 13) | (x >> 19))  /* Rotate left 13 bits */
#endif
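
/* In other words, starting from acc = _PyHASH_XXPRIME_5, each item hash h is
   folded in as:

       acc = _PyHASH_XXPRIME_1 * _PyHASH_XXROTATE(acc + h * _PyHASH_XXPRIME_2)

   and at the end the length, XORed with a constant, is added to the
   accumulator (see tuplehash() below). */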

/* Tests have shown that it's not worth caching the hash value, see
   https://bugs.python.org/issue9685 */
static Py_hash_t
tuplehash(PyTupleObject *v)
{
    Py_ssize_t i, len = Py_SIZE(v);
    PyObject **item = v->ob_item;

    Py_uhash_t acc = _PyHASH_XXPRIME_5;
    for (i = 0; i < len; i++) {
        Py_uhash_t lane = PyObject_Hash(item[i]);
        if (lane == (Py_uhash_t)-1) {
            return -1;
        }
        acc += lane * _PyHASH_XXPRIME_2;
        acc = _PyHASH_XXROTATE(acc);
        acc *= _PyHASH_XXPRIME_1;
    }

    /* Add input length, mangled to keep the historical value of hash(()). */
    acc += len ^ (_PyHASH_XXPRIME_5 ^ 3527539UL);

    if (acc == (Py_uhash_t)-1) {
        return 1546275796;
    }
    return acc;
}

static Py_ssize_t
tuplelength(PyTupleObject *a)
{
    return Py_SIZE(a);
}

static int
tuplecontains(PyTupleObject *a, PyObject *el)
{
    Py_ssize_t i;
    int cmp;

    for (i = 0, cmp = 0; cmp == 0 && i < Py_SIZE(a); ++i)
        cmp = PyObject_RichCompareBool(PyTuple_GET_ITEM(a, i), el, Py_EQ);
    return cmp;
}

static PyObject *
tupleitem(PyTupleObject *a, Py_ssize_t i)
{
    if (i < 0 || i >= Py_SIZE(a)) {
        PyErr_SetString(PyExc_IndexError, "tuple index out of range");
        return NULL;
    }
    Py_INCREF(a->ob_item[i]);
    return a->ob_item[i];
}

PyObject *
_PyTuple_FromArray(PyObject *const *src, Py_ssize_t n)
{
    if (n == 0) {
        return tuple_get_empty();
    }

    PyTupleObject *tuple = tuple_alloc(n);
    if (tuple == NULL) {
        return NULL;
    }
    PyObject **dst = tuple->ob_item;
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *item = src[i];
        Py_INCREF(item);
        dst[i] = item;
    }
    tuple_gc_track(tuple);
    return (PyObject *)tuple;
}

static PyObject *
tupleslice(PyTupleObject *a, Py_ssize_t ilow,
           Py_ssize_t ihigh)
{
    if (ilow < 0)
        ilow = 0;
    if (ihigh > Py_SIZE(a))
        ihigh = Py_SIZE(a);
    if (ihigh < ilow)
        ihigh = ilow;
    if (ilow == 0 && ihigh == Py_SIZE(a) && PyTuple_CheckExact(a)) {
        Py_INCREF(a);
        return (PyObject *)a;
    }
    return _PyTuple_FromArray(a->ob_item + ilow, ihigh - ilow);
}

PyObject *
PyTuple_GetSlice(PyObject *op, Py_ssize_t i, Py_ssize_t j)
{
    if (op == NULL || !PyTuple_Check(op)) {
        PyErr_BadInternalCall();
        return NULL;
    }
    return tupleslice((PyTupleObject *)op, i, j);
}

static PyObject *
tupleconcat(PyTupleObject *a, PyObject *bb)
{
    Py_ssize_t size;
    Py_ssize_t i;
    PyObject **src, **dest;
    PyTupleObject *np;
    if (Py_SIZE(a) == 0 && PyTuple_CheckExact(bb)) {
        Py_INCREF(bb);
        return bb;
    }
    if (!PyTuple_Check(bb)) {
        PyErr_Format(PyExc_TypeError,
                     "can only concatenate tuple (not \"%.200s\") to tuple",
                     Py_TYPE(bb)->tp_name);
        return NULL;
    }
    PyTupleObject *b = (PyTupleObject *)bb;

    if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) {
        Py_INCREF(a);
        return (PyObject *)a;
    }
    assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX);
    size = Py_SIZE(a) + Py_SIZE(b);
    if (size == 0) {
        return tuple_get_empty();
    }

    np = tuple_alloc(size);
    if (np == NULL) {
        return NULL;
    }
    src = a->ob_item;
    dest = np->ob_item;
    for (i = 0; i < Py_SIZE(a); i++) {
        PyObject *v = src[i];
        Py_INCREF(v);
        dest[i] = v;
    }
    src = b->ob_item;
    dest = np->ob_item + Py_SIZE(a);
    for (i = 0; i < Py_SIZE(b); i++) {
        PyObject *v = src[i];
        Py_INCREF(v);
        dest[i] = v;
    }
    tuple_gc_track(np);
    return (PyObject *)np;
}

static PyObject *
tuplerepeat(PyTupleObject *a, Py_ssize_t n)
{
    Py_ssize_t i, j;
    Py_ssize_t size;
    PyTupleObject *np;
    PyObject **p, **items;
    if (Py_SIZE(a) == 0 || n == 1) {
        if (PyTuple_CheckExact(a)) {
            /* Since tuples are immutable, we can return a shared
               copy in this case */
            Py_INCREF(a);
            return (PyObject *)a;
        }
    }
    if (Py_SIZE(a) == 0 || n <= 0) {
        return tuple_get_empty();
    }
    if (n > PY_SSIZE_T_MAX / Py_SIZE(a))
        return PyErr_NoMemory();
    size = Py_SIZE(a) * n;
    np = tuple_alloc(size);
    if (np == NULL)
        return NULL;
    p = np->ob_item;
    items = a->ob_item;
    for (i = 0; i < n; i++) {
        for (j = 0; j < Py_SIZE(a); j++) {
            *p = items[j];
            Py_INCREF(*p);
            p++;
        }
    }
    tuple_gc_track(np);
    return (PyObject *) np;
}

/*[clinic input]
tuple.index

    value: object
    start: slice_index(accept={int}) = 0
    stop: slice_index(accept={int}, c_default="PY_SSIZE_T_MAX") = sys.maxsize
    /

Return first index of value.

Raises ValueError if the value is not present.
[clinic start generated code]*/

static PyObject *
tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start,
                 Py_ssize_t stop)
/*[clinic end generated code: output=07b6f9f3cb5c33eb input=fb39e9874a21fe3f]*/
{
    Py_ssize_t i;

    if (start < 0) {
        start += Py_SIZE(self);
        if (start < 0)
            start = 0;
    }
    if (stop < 0) {
        stop += Py_SIZE(self);
    }
    else if (stop > Py_SIZE(self)) {
        stop = Py_SIZE(self);
    }
    for (i = start; i < stop; i++) {
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
        if (cmp > 0)
            return PyLong_FromSsize_t(i);
        else if (cmp < 0)
            return NULL;
    }
    PyErr_SetString(PyExc_ValueError, "tuple.index(x): x not in tuple");
    return NULL;
}

/*[clinic input]
tuple.count

    value: object
    /

Return number of occurrences of value.
[clinic start generated code]*/

static PyObject *
tuple_count(PyTupleObject *self, PyObject *value)
/*[clinic end generated code: output=aa927affc5a97605 input=531721aff65bd772]*/
{
    Py_ssize_t count = 0;
    Py_ssize_t i;

    for (i = 0; i < Py_SIZE(self); i++) {
        int cmp = PyObject_RichCompareBool(self->ob_item[i], value, Py_EQ);
        if (cmp > 0)
            count++;
        else if (cmp < 0)
            return NULL;
    }
    return PyLong_FromSsize_t(count);
}

static int
tupletraverse(PyTupleObject *o, visitproc visit, void *arg)
{
    Py_ssize_t i;

    for (i = Py_SIZE(o); --i >= 0; )
        Py_VISIT(o->ob_item[i]);
    return 0;
}

static PyObject *
tuplerichcompare(PyObject *v, PyObject *w, int op)
{
    PyTupleObject *vt, *wt;
    Py_ssize_t i;
    Py_ssize_t vlen, wlen;

    if (!PyTuple_Check(v) || !PyTuple_Check(w))
        Py_RETURN_NOTIMPLEMENTED;

    vt = (PyTupleObject *)v;
    wt = (PyTupleObject *)w;

    vlen = Py_SIZE(vt);
    wlen = Py_SIZE(wt);

    /* Note: the corresponding code for lists has an "early out" test
     * here when op is EQ or NE and the lengths differ. That pays there,
     * but Tim was unable to find any real code where EQ/NE tuple
     * compares don't have the same length, so testing for it here would
     * have cost without benefit.
     */

    /* Search for the first index where items are different.
     * Note that because tuples are immutable, it's safe to reuse
     * vlen and wlen across the comparison calls.
     */
    for (i = 0; i < vlen && i < wlen; i++) {
        int k = PyObject_RichCompareBool(vt->ob_item[i],
                                         wt->ob_item[i], Py_EQ);
        if (k < 0)
            return NULL;
        if (!k)
            break;
    }

    if (i >= vlen || i >= wlen) {
        /* No more items to compare -- compare sizes */
        Py_RETURN_RICHCOMPARE(vlen, wlen, op);
    }

    /* We have an item that differs -- shortcuts for EQ/NE */
    if (op == Py_EQ) {
        Py_RETURN_FALSE;
    }
    if (op == Py_NE) {
        Py_RETURN_TRUE;
    }

    /* Compare the final item again using the proper operator */
    return PyObject_RichCompare(vt->ob_item[i], wt->ob_item[i], op);
}
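
/* For example, evaluating (1, 2) < (1, 3) finds the first difference at
   index 1 and returns PyObject_RichCompare(2, 3, Py_LT), while
   (1, 2) < (1, 2, 0) runs off the end of the shorter operand and falls back
   to comparing the lengths (2 < 3). */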

static PyObject *
tuple_subtype_new(PyTypeObject *type, PyObject *iterable);

/*[clinic input]
@classmethod
tuple.__new__ as tuple_new
    iterable: object(c_default="NULL") = ()
    /

Built-in immutable sequence.

If no argument is given, the constructor returns an empty tuple.
If iterable is specified, the tuple is initialized from iterable's items.

If the argument is a tuple, the return value is the same object.
[clinic start generated code]*/

static PyObject *
tuple_new_impl(PyTypeObject *type, PyObject *iterable)
/*[clinic end generated code: output=4546d9f0d469bce7 input=86963bcde633b5a2]*/
{
    if (type != &PyTuple_Type)
        return tuple_subtype_new(type, iterable);

    if (iterable == NULL) {
        return tuple_get_empty();
    }
    else {
        return PySequence_Tuple(iterable);
    }
}

static PyObject *
tuple_vectorcall(PyObject *type, PyObject * const *args,
                 size_t nargsf, PyObject *kwnames)
{
    if (!_PyArg_NoKwnames("tuple", kwnames)) {
        return NULL;
    }

    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
    if (!_PyArg_CheckPositional("tuple", nargs, 0, 1)) {
        return NULL;
    }

    if (nargs) {
        return tuple_new_impl((PyTypeObject *)type, args[0]);
    }
    else {
        return tuple_get_empty();
    }
}

static PyObject *
tuple_subtype_new(PyTypeObject *type, PyObject *iterable)
{
    PyObject *tmp, *newobj, *item;
    Py_ssize_t i, n;

    assert(PyType_IsSubtype(type, &PyTuple_Type));
    tmp = tuple_new_impl(&PyTuple_Type, iterable);
    if (tmp == NULL)
        return NULL;
    assert(PyTuple_Check(tmp));
    newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
    if (newobj == NULL) {
        Py_DECREF(tmp);
        return NULL;
    }
    for (i = 0; i < n; i++) {
        item = PyTuple_GET_ITEM(tmp, i);
        Py_INCREF(item);
        PyTuple_SET_ITEM(newobj, i, item);
    }
    Py_DECREF(tmp);
    return newobj;
}

static PySequenceMethods tuple_as_sequence = {
    (lenfunc)tuplelength,                       /* sq_length */
    (binaryfunc)tupleconcat,                    /* sq_concat */
    (ssizeargfunc)tuplerepeat,                  /* sq_repeat */
    (ssizeargfunc)tupleitem,                    /* sq_item */
    0,                                          /* sq_slice */
    0,                                          /* sq_ass_item */
    0,                                          /* sq_ass_slice */
    (objobjproc)tuplecontains,                  /* sq_contains */
};

static PyObject*
tuplesubscript(PyTupleObject* self, PyObject* item)
{
    if (_PyIndex_Check(item)) {
        Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError);
        if (i == -1 && PyErr_Occurred())
            return NULL;
        if (i < 0)
            i += PyTuple_GET_SIZE(self);
        return tupleitem(self, i);
    }
    else if (PySlice_Check(item)) {
        Py_ssize_t start, stop, step, slicelength, i;
        size_t cur;
        PyObject* it;
        PyObject **src, **dest;

        if (PySlice_Unpack(item, &start, &stop, &step) < 0) {
            return NULL;
        }
        slicelength = PySlice_AdjustIndices(PyTuple_GET_SIZE(self), &start,
                                            &stop, step);

        if (slicelength <= 0) {
            return tuple_get_empty();
        }
        else if (start == 0 && step == 1 &&
                 slicelength == PyTuple_GET_SIZE(self) &&
                 PyTuple_CheckExact(self)) {
            Py_INCREF(self);
            return (PyObject *)self;
        }
        else {
            PyTupleObject* result = tuple_alloc(slicelength);
            if (!result) return NULL;

            src = self->ob_item;
            dest = result->ob_item;
            for (cur = start, i = 0; i < slicelength;
                 cur += step, i++) {
                it = src[cur];
                Py_INCREF(it);
                dest[i] = it;
            }

            tuple_gc_track(result);
            return (PyObject *)result;
        }
    }
    else {
        PyErr_Format(PyExc_TypeError,
                     "tuple indices must be integers or slices, not %.200s",
                     Py_TYPE(item)->tp_name);
        return NULL;
    }
}
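
/* For example, t[1] takes the _PyIndex_Check() branch above, t[::2] takes
   the PySlice_Check() branch and copies every other item, and t[:] on an
   exact tuple hits the shortcut that simply increfs and returns self. */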

/*[clinic input]
tuple.__getnewargs__
[clinic start generated code]*/

static PyObject *
tuple___getnewargs___impl(PyTupleObject *self)
/*[clinic end generated code: output=25e06e3ee56027e2 input=1aeb4b286a21639a]*/
{
    return Py_BuildValue("(N)", tupleslice(self, 0, Py_SIZE(self)));
}

static PyMethodDef tuple_methods[] = {
    TUPLE___GETNEWARGS___METHODDEF
    TUPLE_INDEX_METHODDEF
    TUPLE_COUNT_METHODDEF
    {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
    {NULL, NULL}                                /* sentinel */
};

static PyMappingMethods tuple_as_mapping = {
    (lenfunc)tuplelength,
    (binaryfunc)tuplesubscript,
    0
};

static PyObject *tuple_iter(PyObject *seq);

PyTypeObject PyTuple_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple",
    sizeof(PyTupleObject) - sizeof(PyObject *),
    sizeof(PyObject *),
    (destructor)tupledealloc,                   /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    (reprfunc)tuplerepr,                        /* tp_repr */
    0,                                          /* tp_as_number */
    &tuple_as_sequence,                         /* tp_as_sequence */
    &tuple_as_mapping,                          /* tp_as_mapping */
    (hashfunc)tuplehash,                        /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
        Py_TPFLAGS_BASETYPE | Py_TPFLAGS_TUPLE_SUBCLASS |
        _Py_TPFLAGS_MATCH_SELF | Py_TPFLAGS_SEQUENCE,  /* tp_flags */
    tuple_new__doc__,                           /* tp_doc */
    (traverseproc)tupletraverse,                /* tp_traverse */
    0,                                          /* tp_clear */
    tuplerichcompare,                           /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    tuple_iter,                                 /* tp_iter */
    0,                                          /* tp_iternext */
    tuple_methods,                              /* tp_methods */
    0,                                          /* tp_members */
    0,                                          /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    0,                                          /* tp_dictoffset */
    0,                                          /* tp_init */
    0,                                          /* tp_alloc */
    tuple_new,                                  /* tp_new */
    PyObject_GC_Del,                            /* tp_free */
    .tp_vectorcall = tuple_vectorcall,
};

/* The following function breaks the notion that tuples are immutable:
   it changes the size of a tuple. We get away with this only if there
   is only one reference to the object. You can also think of it as
   creating a new tuple object and destroying the old one, only more
   efficiently. In any case, don't use this if the tuple may already be
   known to some other part of the code. */

int
_PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
{
    PyTupleObject *v;
    PyTupleObject *sv;
    Py_ssize_t i;
    Py_ssize_t oldsize;

    v = (PyTupleObject *) *pv;
    if (v == NULL || !Py_IS_TYPE(v, &PyTuple_Type) ||
        (Py_SIZE(v) != 0 && Py_REFCNT(v) != 1)) {
        *pv = 0;
        Py_XDECREF(v);
        PyErr_BadInternalCall();
        return -1;
    }
    oldsize = Py_SIZE(v);
    if (oldsize == newsize)
        return 0;

    if (oldsize == 0) {
        /* Empty tuples are often shared, so we should never
           resize them in-place even if we do own the only
           (current) reference */
        Py_DECREF(v);
        *pv = PyTuple_New(newsize);
        return *pv == NULL ? -1 : 0;
    }

    /* XXX UNREF/NEWREF interface should be more symmetrical */
#ifdef Py_REF_DEBUG
    _Py_RefTotal--;
#endif
    if (_PyObject_GC_IS_TRACKED(v)) {
        _PyObject_GC_UNTRACK(v);
    }
#ifdef Py_TRACE_REFS
    _Py_ForgetReference((PyObject *) v);
#endif
    /* DECREF items deleted by shrinkage */
    for (i = newsize; i < oldsize; i++) {
        Py_CLEAR(v->ob_item[i]);
    }
    sv = PyObject_GC_Resize(PyTupleObject, v, newsize);
    if (sv == NULL) {
        *pv = NULL;
        PyObject_GC_Del(v);
        return -1;
    }
    _Py_NewReference((PyObject *) sv);
    /* Zero out items added by growing */
    if (newsize > oldsize)
        memset(&sv->ob_item[oldsize], 0,
               sizeof(*sv->ob_item) * (newsize - oldsize));
    *pv = (PyObject *) sv;
    _PyObject_GC_TRACK(sv);
    return 0;
}
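
/* A minimal usage sketch (hypothetical caller, not part of this file): grow
   a tuple while building a result of initially unknown length. The tuple
   object may move, so only the passed-in pointer is valid afterwards; new
   slots are zeroed and must be filled before the tuple is exposed. Error
   checks on item creation are elided for brevity. */
#if 0
static PyObject *
build_triple(void)
{
    PyObject *t = PyTuple_New(2);
    if (t == NULL) {
        return NULL;
    }
    PyTuple_SET_ITEM(t, 0, PyLong_FromLong(1));
    PyTuple_SET_ITEM(t, 1, PyLong_FromLong(2));
    if (_PyTuple_Resize(&t, 3) < 0) {
        return NULL;    /* _PyTuple_Resize() already dropped the reference */
    }
    PyTuple_SET_ITEM(t, 2, PyLong_FromLong(3));
    return t;
}
#endif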

void
_PyTuple_ClearFreeList(PyInterpreterState *interp)
{
#if PyTuple_MAXSAVESIZE > 0
    struct _Py_tuple_state *state = &interp->tuple;
    for (Py_ssize_t i = 1; i < PyTuple_MAXSAVESIZE; i++) {
        PyTupleObject *p = state->free_list[i];
        state->free_list[i] = NULL;
        state->numfree[i] = 0;
        while (p) {
            PyTupleObject *q = p;
            p = (PyTupleObject *)(p->ob_item[0]);
            PyObject_GC_Del(q);
        }
    }
    // the empty tuple singleton is only cleared by _PyTuple_Fini()
#endif
}


PyStatus
_PyTuple_Init(PyInterpreterState *interp)
{
    struct _Py_tuple_state *state = &interp->tuple;
    if (tuple_create_empty_tuple_singleton(state) < 0) {
        return _PyStatus_NO_MEMORY();
    }
    return _PyStatus_OK();
}


void
_PyTuple_Fini(PyInterpreterState *interp)
{
#if PyTuple_MAXSAVESIZE > 0
    struct _Py_tuple_state *state = &interp->tuple;
    // The empty tuple singleton must not be tracked by the GC
    assert(!_PyObject_GC_IS_TRACKED(state->free_list[0]));
    Py_CLEAR(state->free_list[0]);
    _PyTuple_ClearFreeList(interp);
#ifdef Py_DEBUG
    state->numfree[0] = -1;
#endif
#endif
}

/*********************** Tuple Iterator **************************/

typedef struct {
    PyObject_HEAD
    Py_ssize_t it_index;
    PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
} tupleiterobject;

static void
tupleiter_dealloc(tupleiterobject *it)
{
    _PyObject_GC_UNTRACK(it);
    Py_XDECREF(it->it_seq);
    PyObject_GC_Del(it);
}

static int
tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg)
{
    Py_VISIT(it->it_seq);
    return 0;
}

static PyObject *
tupleiter_next(tupleiterobject *it)
{
    PyTupleObject *seq;
    PyObject *item;

    assert(it != NULL);
    seq = it->it_seq;
    if (seq == NULL)
        return NULL;
    assert(PyTuple_Check(seq));

    if (it->it_index < PyTuple_GET_SIZE(seq)) {
        item = PyTuple_GET_ITEM(seq, it->it_index);
        ++it->it_index;
        Py_INCREF(item);
        return item;
    }

    it->it_seq = NULL;
    Py_DECREF(seq);
    return NULL;
}

static PyObject *
tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
{
    Py_ssize_t len = 0;
    if (it->it_seq)
        len = PyTuple_GET_SIZE(it->it_seq) - it->it_index;
    return PyLong_FromSsize_t(len);
}

PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");

static PyObject *
tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
{
    _Py_IDENTIFIER(iter);
    if (it->it_seq)
        return Py_BuildValue("N(O)n", _PyEval_GetBuiltinId(&PyId_iter),
                             it->it_seq, it->it_index);
    else
        return Py_BuildValue("N(())", _PyEval_GetBuiltinId(&PyId_iter));
}

static PyObject *
tupleiter_setstate(tupleiterobject *it, PyObject *state)
{
    Py_ssize_t index = PyLong_AsSsize_t(state);
    if (index == -1 && PyErr_Occurred())
        return NULL;
    if (it->it_seq != NULL) {
        if (index < 0)
            index = 0;
        else if (index > PyTuple_GET_SIZE(it->it_seq))
            index = PyTuple_GET_SIZE(it->it_seq); /* exhausted iterator */
        it->it_index = index;
    }
    Py_RETURN_NONE;
}

PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");

static PyMethodDef tupleiter_methods[] = {
    {"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
    {"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
    {"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
    {NULL, NULL}                                /* sentinel */
};

PyTypeObject PyTupleIter_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "tuple_iterator",                           /* tp_name */
    sizeof(tupleiterobject),                    /* tp_basicsize */
    0,                                          /* tp_itemsize */
    /* methods */
    (destructor)tupleiter_dealloc,              /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    0,                                          /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    /* tp_flags */
    0,                                          /* tp_doc */
    (traverseproc)tupleiter_traverse,           /* tp_traverse */
    0,                                          /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    PyObject_SelfIter,                          /* tp_iter */
    (iternextfunc)tupleiter_next,               /* tp_iternext */
    tupleiter_methods,                          /* tp_methods */
    0,                                          /* tp_members */
};

static PyObject *
tuple_iter(PyObject *seq)
{
    tupleiterobject *it;

    if (!PyTuple_Check(seq)) {
        PyErr_BadInternalCall();
        return NULL;
    }
    it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
    if (it == NULL)
        return NULL;
    it->it_index = 0;
    Py_INCREF(seq);
    it->it_seq = (PyTupleObject *)seq;
    _PyObject_GC_TRACK(it);
    return (PyObject *)it;
}