#include "Python.h"
#include "pycore_call.h"          // _PyObject_CallNoArgs()
#include "pycore_dict.h"          // _PyDict_Pop_KnownHash()
#include "pycore_long.h"          // _PyLong_GetZero()
#include "pycore_moduleobject.h"  // _PyModule_GetState()
#include "pycore_object.h"        // _PyObject_GC_TRACK
#include "pycore_pystate.h"       // _PyThreadState_GET()
#include "pycore_tuple.h"         // _PyTuple_ITEMS()


#include "clinic/_functoolsmodule.c.h"
/*[clinic input]
module _functools
class _functools._lru_cache_wrapper "PyObject *" "&lru_cache_type_spec"
[clinic start generated code]*/
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=bece4053896b09c0]*/

/* _functools module written and maintained
   by Hye-Shik Chang <perky@FreeBSD.org>
   with adaptations by Raymond Hettinger <python@rcn.com>
   Copyright (c) 2004, 2005, 2006 Python Software Foundation.
   All rights reserved.
*/

typedef struct _functools_state {
    /* this object is used to delimit args and keywords in the cache keys */
    PyObject *kwd_mark;
    PyTypeObject *partial_type;
    PyTypeObject *keyobject_type;
    PyTypeObject *lru_list_elem_type;
} _functools_state;

static inline _functools_state *
get_functools_state(PyObject *module)
{
    void *state = _PyModule_GetState(module);
    assert(state != NULL);
    return (_functools_state *)state;
}


/* partial object **********************************************************/

typedef struct {
    PyObject_HEAD
    PyObject *fn;
    PyObject *args;
    PyObject *kw;
    PyObject *dict;        /* __dict__ */
    PyObject *weakreflist; /* List of weak references */
    vectorcallfunc vectorcall;
} partialobject;

static void partial_setvectorcall(partialobject *pto);
static struct PyModuleDef _functools_module;
static PyObject *
partial_call(partialobject *pto, PyObject *args, PyObject *kwargs);

static inline _functools_state *
get_functools_state_by_type(PyTypeObject *type)
{
    PyObject *module = PyType_GetModuleByDef(type, &_functools_module);
    if (module == NULL) {
        return NULL;
    }
    return get_functools_state(module);
}

// Not converted to argument clinic, because of `*args, **kwargs` arguments.
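// Rough Python-level picture of what the constructor below implements
// (illustrative only, not taken from this file):
//
//     p1 = partial(f, 1, a=1)
//     p2 = partial(p1, 2, b=2)
//     # p2.func is f, p2.args == (1, 2), p2.keywords == {'a': 1, 'b': 2}
//
// The inner partial is only flattened like this when it carries no
// instance __dict__ (e.g. it is not a subclass instance with extra state).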
static PyObject *
partial_new(PyTypeObject *type, PyObject *args, PyObject *kw)
{
    PyObject *func, *pargs, *nargs, *pkw;
    partialobject *pto;

    if (PyTuple_GET_SIZE(args) < 1) {
        PyErr_SetString(PyExc_TypeError,
                        "type 'partial' takes at least one argument");
        return NULL;
    }

    _functools_state *state = get_functools_state_by_type(type);
    if (state == NULL) {
        return NULL;
    }

    pargs = pkw = NULL;
    func = PyTuple_GET_ITEM(args, 0);

    int res = PyObject_TypeCheck(func, state->partial_type);
    if (res == -1) {
        return NULL;
    }
    if (res == 1) {
        // We can use its underlying function directly and merge the arguments.
        partialobject *part = (partialobject *)func;
        if (part->dict == NULL) {
            pargs = part->args;
            pkw = part->kw;
            func = part->fn;
            assert(PyTuple_Check(pargs));
            assert(PyDict_Check(pkw));
        }
    }
    if (!PyCallable_Check(func)) {
        PyErr_SetString(PyExc_TypeError,
                        "the first argument must be callable");
        return NULL;
    }

    /* create partialobject structure */
    pto = (partialobject *)type->tp_alloc(type, 0);
    if (pto == NULL)
        return NULL;

    pto->fn = Py_NewRef(func);

    nargs = PyTuple_GetSlice(args, 1, PY_SSIZE_T_MAX);
    if (nargs == NULL) {
        Py_DECREF(pto);
        return NULL;
    }
    if (pargs == NULL) {
        pto->args = nargs;
    }
    else {
        pto->args = PySequence_Concat(pargs, nargs);
        Py_DECREF(nargs);
        if (pto->args == NULL) {
            Py_DECREF(pto);
            return NULL;
        }
        assert(PyTuple_Check(pto->args));
    }

    if (pkw == NULL || PyDict_GET_SIZE(pkw) == 0) {
        if (kw == NULL) {
            pto->kw = PyDict_New();
        }
        else if (Py_REFCNT(kw) == 1) {
            pto->kw = Py_NewRef(kw);
        }
        else {
            pto->kw = PyDict_Copy(kw);
        }
    }
    else {
        pto->kw = PyDict_Copy(pkw);
        if (kw != NULL && pto->kw != NULL) {
            if (PyDict_Merge(pto->kw, kw, 1) != 0) {
                Py_DECREF(pto);
                return NULL;
            }
        }
    }
    if (pto->kw == NULL) {
        Py_DECREF(pto);
        return NULL;
    }

    partial_setvectorcall(pto);
    return (PyObject *)pto;
}

static int
partial_clear(partialobject *pto)
{
    Py_CLEAR(pto->fn);
    Py_CLEAR(pto->args);
    Py_CLEAR(pto->kw);
    Py_CLEAR(pto->dict);
    return 0;
}

static int
partial_traverse(partialobject *pto, visitproc visit, void *arg)
{
    Py_VISIT(Py_TYPE(pto));
    Py_VISIT(pto->fn);
    Py_VISIT(pto->args);
    Py_VISIT(pto->kw);
    Py_VISIT(pto->dict);
    return 0;
}

static void
partial_dealloc(partialobject *pto)
{
    PyTypeObject *tp = Py_TYPE(pto);
    /* bpo-31095: UnTrack is needed before calling any callbacks */
    PyObject_GC_UnTrack(pto);
    if (pto->weakreflist != NULL) {
        PyObject_ClearWeakRefs((PyObject *) pto);
    }
    (void)partial_clear(pto);
    tp->tp_free(pto);
    Py_DECREF(tp);
}

static PyObject *
partial_descr_get(PyObject *self, PyObject *obj, PyObject *type)
{
    if (obj == Py_None || obj == NULL) {
        return Py_NewRef(self);
    }
    if (PyErr_WarnEx(PyExc_FutureWarning,
                     "functools.partial will be a method descriptor in "
                     "future Python versions; wrap it in staticmethod() "
                     "if you want to preserve the old behavior", 1) < 0)
    {
        return NULL;
    }
    return Py_NewRef(self);
}

/* Merging keyword arguments using the vectorcall convention is messy, so
 * if we would need to do that, we stop using vectorcall and fall back
 * to using partial_call() instead. */
Py_NO_INLINE static PyObject *
partial_vectorcall_fallback(PyThreadState *tstate, partialobject *pto,
                            PyObject *const *args, size_t nargsf,
                            PyObject *kwnames)
{
    pto->vectorcall = NULL;
    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
    return _PyObject_MakeTpCall(tstate, (PyObject *)pto,
                                args, nargs, kwnames);
}

static PyObject *
partial_vectorcall(partialobject *pto, PyObject *const *args,
                   size_t nargsf, PyObject *kwnames)
{
    PyThreadState *tstate = _PyThreadState_GET();

    /* pto->kw is mutable, so need to check every time */
    if (PyDict_GET_SIZE(pto->kw)) {
        return partial_vectorcall_fallback(tstate, pto, args, nargsf, kwnames);
    }

    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
    Py_ssize_t nargs_total = nargs;
    if (kwnames != NULL) {
        nargs_total += PyTuple_GET_SIZE(kwnames);
    }

    PyObject **pto_args = _PyTuple_ITEMS(pto->args);
    Py_ssize_t pto_nargs = PyTuple_GET_SIZE(pto->args);

    /* Fast path if we're called without arguments */
    if (nargs_total == 0) {
        return _PyObject_VectorcallTstate(tstate, pto->fn,
                                          pto_args, pto_nargs, NULL);
    }

    /* Fast path using PY_VECTORCALL_ARGUMENTS_OFFSET to prepend a single
     * positional argument */
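    /* When the caller sets PY_VECTORCALL_ARGUMENTS_OFFSET it guarantees that
     * args[-1] is scratch space the callee may temporarily overwrite.  That
     * lets us write our single pre-bound argument into that slot, call the
     * wrapped function on the shifted array, and then restore the original
     * value, avoiding any copying of the argument array. */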
    if (pto_nargs == 1 && (nargsf & PY_VECTORCALL_ARGUMENTS_OFFSET)) {
        PyObject **newargs = (PyObject **)args - 1;
        PyObject *tmp = newargs[0];
        newargs[0] = pto_args[0];
        PyObject *ret = _PyObject_VectorcallTstate(tstate, pto->fn,
                                                   newargs, nargs + 1, kwnames);
        newargs[0] = tmp;
        return ret;
    }

    Py_ssize_t newnargs_total = pto_nargs + nargs_total;

    PyObject *small_stack[_PY_FASTCALL_SMALL_STACK];
    PyObject *ret;
    PyObject **stack;

    if (newnargs_total <= (Py_ssize_t)Py_ARRAY_LENGTH(small_stack)) {
        stack = small_stack;
    }
    else {
        stack = PyMem_Malloc(newnargs_total * sizeof(PyObject *));
        if (stack == NULL) {
            PyErr_NoMemory();
            return NULL;
        }
    }

    /* Copy to new stack, using borrowed references */
    memcpy(stack, pto_args, pto_nargs * sizeof(PyObject*));
    memcpy(stack + pto_nargs, args, nargs_total * sizeof(PyObject*));

    ret = _PyObject_VectorcallTstate(tstate, pto->fn,
                                     stack, pto_nargs + nargs, kwnames);
    if (stack != small_stack) {
        PyMem_Free(stack);
    }
    return ret;
}

/* Set pto->vectorcall depending on the parameters of the partial object */
static void
partial_setvectorcall(partialobject *pto)
{
    if (PyVectorcall_Function(pto->fn) == NULL) {
        /* Don't use vectorcall if the underlying function doesn't support it */
        pto->vectorcall = NULL;
    }
    /* We could have a special case if there are no arguments,
     * but that is unlikely (why use partial without arguments?),
     * so we don't optimize that */
    else {
        pto->vectorcall = (vectorcallfunc)partial_vectorcall;
    }
}


// Not converted to argument clinic, because of `*args, **kwargs` arguments.
static PyObject *
partial_call(partialobject *pto, PyObject *args, PyObject *kwargs)
{
    assert(PyCallable_Check(pto->fn));
    assert(PyTuple_Check(pto->args));
    assert(PyDict_Check(pto->kw));

    /* Merge keywords */
    PyObject *kwargs2;
    if (PyDict_GET_SIZE(pto->kw) == 0) {
        /* kwargs can be NULL */
        kwargs2 = Py_XNewRef(kwargs);
    }
    else {
        /* bpo-27840, bpo-29318: dictionary of keyword parameters must be
           copied, because a function using "**kwargs" can modify the
           dictionary. */
        kwargs2 = PyDict_Copy(pto->kw);
        if (kwargs2 == NULL) {
            return NULL;
        }

        if (kwargs != NULL) {
            if (PyDict_Merge(kwargs2, kwargs, 1) != 0) {
                Py_DECREF(kwargs2);
                return NULL;
            }
        }
    }

    /* Merge positional arguments */
    /* Note: tupleconcat() is optimized for empty tuples */
    PyObject *args2 = PySequence_Concat(pto->args, args);
    if (args2 == NULL) {
        Py_XDECREF(kwargs2);
        return NULL;
    }

    PyObject *res = PyObject_Call(pto->fn, args2, kwargs2);
    Py_DECREF(args2);
    Py_XDECREF(kwargs2);
    return res;
}

PyDoc_STRVAR(partial_doc,
"partial(func, /, *args, **keywords)\n--\n\n\
Create a new function with partial application of the given arguments\n\
and keywords.");
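
/* Illustrative Python-level usage of the object implemented above
 * (example adapted from the functools documentation, not generated here):
 *
 *     >>> from functools import partial
 *     >>> basetwo = partial(int, base=2)
 *     >>> basetwo('10010')
 *     18
 */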

#define OFF(x) offsetof(partialobject, x)
static PyMemberDef partial_memberlist[] = {
    {"func", _Py_T_OBJECT, OFF(fn), Py_READONLY,
     "function object to use in future partial calls"},
    {"args", _Py_T_OBJECT, OFF(args), Py_READONLY,
     "tuple of arguments to future partial calls"},
    {"keywords", _Py_T_OBJECT, OFF(kw), Py_READONLY,
     "dictionary of keyword arguments to future partial calls"},
    {"__weaklistoffset__", Py_T_PYSSIZET,
     offsetof(partialobject, weakreflist), Py_READONLY},
    {"__dictoffset__", Py_T_PYSSIZET,
     offsetof(partialobject, dict), Py_READONLY},
    {"__vectorcalloffset__", Py_T_PYSSIZET,
     offsetof(partialobject, vectorcall), Py_READONLY},
    {NULL}  /* Sentinel */
};
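
/* For heap types created from a PyType_Spec, the special members
 * __weaklistoffset__, __dictoffset__ and __vectorcalloffset__ above are how
 * the corresponding tp_weaklistoffset, tp_dictoffset and tp_vectorcall_offset
 * slots get populated. */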

static PyGetSetDef partial_getsetlist[] = {
    {"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict},
    {NULL} /* Sentinel */
};

static PyObject *
partial_repr(partialobject *pto)
{
    PyObject *result = NULL;
    PyObject *arglist;
    PyObject *mod;
    PyObject *name;
    Py_ssize_t i, n;
    PyObject *key, *value;
    int status;

    status = Py_ReprEnter((PyObject *)pto);
    if (status != 0) {
        if (status < 0)
            return NULL;
        return PyUnicode_FromString("...");
    }

    arglist = PyUnicode_FromString("");
    if (arglist == NULL)
        goto done;
    /* Pack positional arguments */
    assert (PyTuple_Check(pto->args));
    n = PyTuple_GET_SIZE(pto->args);
    for (i = 0; i < n; i++) {
        Py_SETREF(arglist, PyUnicode_FromFormat("%U, %R", arglist,
                                                PyTuple_GET_ITEM(pto->args, i)));
        if (arglist == NULL)
            goto done;
    }
    /* Pack keyword arguments */
    assert (PyDict_Check(pto->kw));
    for (i = 0; PyDict_Next(pto->kw, &i, &key, &value);) {
        /* Prevent key.__str__ from deleting the value. */
        Py_INCREF(value);
        Py_SETREF(arglist, PyUnicode_FromFormat("%U, %S=%R", arglist,
                                                key, value));
        Py_DECREF(value);
        if (arglist == NULL)
            goto done;
    }

    mod = PyType_GetModuleName(Py_TYPE(pto));
    if (mod == NULL) {
        goto error;
    }
    name = PyType_GetQualName(Py_TYPE(pto));
    if (name == NULL) {
        Py_DECREF(mod);
        goto error;
    }
    result = PyUnicode_FromFormat("%S.%S(%R%U)", mod, name, pto->fn, arglist);
    Py_DECREF(mod);
    Py_DECREF(name);
    Py_DECREF(arglist);

done:
    Py_ReprLeave((PyObject *)pto);
    return result;
error:
    Py_DECREF(arglist);
    Py_ReprLeave((PyObject *)pto);
    return NULL;
}

/* Pickle strategy:
   __reduce__ by itself doesn't support getting kwargs in the unpickle
   operation so we define a __setstate__ that replaces all the information
   about the partial.  If we only replaced part of it someone would use
   it as a hook to do strange things.
 */
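
/* Concretely, partial_reduce() below returns a 3-tuple of the form
 *
 *     (type(p), (p.func,), (p.func, p.args, p.keywords, p.__dict__ or None))
 *
 * and partial_setstate() consumes that last element again, so the whole
 * state (including keywords) round-trips through pickle. */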

static PyObject *
partial_reduce(partialobject *pto, PyObject *unused)
{
    return Py_BuildValue("O(O)(OOOO)", Py_TYPE(pto), pto->fn, pto->fn,
                         pto->args, pto->kw,
                         pto->dict ? pto->dict : Py_None);
}

static PyObject *
partial_setstate(partialobject *pto, PyObject *state)
{
    PyObject *fn, *fnargs, *kw, *dict;

    if (!PyTuple_Check(state) ||
        !PyArg_ParseTuple(state, "OOOO", &fn, &fnargs, &kw, &dict) ||
        !PyCallable_Check(fn) ||
        !PyTuple_Check(fnargs) ||
        (kw != Py_None && !PyDict_Check(kw)))
    {
        PyErr_SetString(PyExc_TypeError, "invalid partial state");
        return NULL;
    }

    if (!PyTuple_CheckExact(fnargs))
        fnargs = PySequence_Tuple(fnargs);
    else
        Py_INCREF(fnargs);
    if (fnargs == NULL)
        return NULL;

    if (kw == Py_None)
        kw = PyDict_New();
    else if (!PyDict_CheckExact(kw))
        kw = PyDict_Copy(kw);
    else
        Py_INCREF(kw);
    if (kw == NULL) {
        Py_DECREF(fnargs);
        return NULL;
    }

    if (dict == Py_None)
        dict = NULL;
    else
        Py_INCREF(dict);

    Py_SETREF(pto->fn, Py_NewRef(fn));
    Py_SETREF(pto->args, fnargs);
    Py_SETREF(pto->kw, kw);
    Py_XSETREF(pto->dict, dict);
    partial_setvectorcall(pto);
    Py_RETURN_NONE;
}

static PyMethodDef partial_methods[] = {
    {"__reduce__", (PyCFunction)partial_reduce, METH_NOARGS},
    {"__setstate__", (PyCFunction)partial_setstate, METH_O},
    {"__class_getitem__", Py_GenericAlias,
     METH_O|METH_CLASS, PyDoc_STR("See PEP 585")},
    {NULL, NULL} /* sentinel */
};

static PyType_Slot partial_type_slots[] = {
    {Py_tp_dealloc, partial_dealloc},
    {Py_tp_repr, partial_repr},
    {Py_tp_call, partial_call},
    {Py_tp_getattro, PyObject_GenericGetAttr},
    {Py_tp_setattro, PyObject_GenericSetAttr},
    {Py_tp_doc, (void *)partial_doc},
    {Py_tp_traverse, partial_traverse},
    {Py_tp_clear, partial_clear},
    {Py_tp_methods, partial_methods},
    {Py_tp_members, partial_memberlist},
    {Py_tp_getset, partial_getsetlist},
    {Py_tp_descr_get, (descrgetfunc)partial_descr_get},
    {Py_tp_new, partial_new},
    {Py_tp_free, PyObject_GC_Del},
    {0, 0}
};

static PyType_Spec partial_type_spec = {
    .name = "functools.partial",
    .basicsize = sizeof(partialobject),
    .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
             Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_VECTORCALL |
             Py_TPFLAGS_IMMUTABLETYPE,
    .slots = partial_type_slots
};


/* cmp_to_key ***************************************************************/

typedef struct {
    PyObject_HEAD
    PyObject *cmp;
    PyObject *object;
} keyobject;
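
/* How the wrapper is used, roughly (illustrative only): cmp_to_key(mycmp)
 * returns a keyobject whose "object" field is unset; calling that wrapper
 * with a value produces a new keyobject wrapping the value, and comparing
 * two such wrappers calls mycmp(x, y) and compares the result against 0,
 * which is what sorted(..., key=cmp_to_key(mycmp)) relies on. */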

static int
keyobject_clear(keyobject *ko)
{
    Py_CLEAR(ko->cmp);
    Py_CLEAR(ko->object);
    return 0;
}

static void
keyobject_dealloc(keyobject *ko)
{
    PyTypeObject *tp = Py_TYPE(ko);
    PyObject_GC_UnTrack(ko);
    (void)keyobject_clear(ko);
    tp->tp_free(ko);
    Py_DECREF(tp);
}

static int
keyobject_traverse(keyobject *ko, visitproc visit, void *arg)
{
    Py_VISIT(Py_TYPE(ko));
    Py_VISIT(ko->cmp);
    Py_VISIT(ko->object);
    return 0;
}

static PyMemberDef keyobject_members[] = {
    {"obj", _Py_T_OBJECT,
     offsetof(keyobject, object), 0,
     PyDoc_STR("Value wrapped by a key function.")},
    {NULL}
};

static PyObject *
keyobject_text_signature(PyObject *self, void *Py_UNUSED(ignored))
{
    return PyUnicode_FromString("(obj)");
}

static PyGetSetDef keyobject_getset[] = {
    {"__text_signature__", keyobject_text_signature, (setter)NULL},
    {NULL}
};

static PyObject *
keyobject_call(keyobject *ko, PyObject *args, PyObject *kwds);

static PyObject *
keyobject_richcompare(PyObject *ko, PyObject *other, int op);

static PyType_Slot keyobject_type_slots[] = {
    {Py_tp_dealloc, keyobject_dealloc},
    {Py_tp_call, keyobject_call},
    {Py_tp_getattro, PyObject_GenericGetAttr},
    {Py_tp_traverse, keyobject_traverse},
    {Py_tp_clear, keyobject_clear},
    {Py_tp_richcompare, keyobject_richcompare},
    {Py_tp_members, keyobject_members},
    {Py_tp_getset, keyobject_getset},
    {0, 0}
};

static PyType_Spec keyobject_type_spec = {
    .name = "functools.KeyWrapper",
    .basicsize = sizeof(keyobject),
    .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION |
              Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE),
    .slots = keyobject_type_slots
};

static PyObject *
keyobject_call(keyobject *ko, PyObject *args, PyObject *kwds)
{
    PyObject *object;
    keyobject *result;
    static char *kwargs[] = {"obj", NULL};

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "O:K", kwargs, &object))
        return NULL;

    result = PyObject_GC_New(keyobject, Py_TYPE(ko));
    if (result == NULL) {
        return NULL;
    }
    result->cmp = Py_NewRef(ko->cmp);
    result->object = Py_NewRef(object);
    PyObject_GC_Track(result);
    return (PyObject *)result;
}

static PyObject *
keyobject_richcompare(PyObject *ko, PyObject *other, int op)
{
    if (!Py_IS_TYPE(other, Py_TYPE(ko))) {
        PyErr_Format(PyExc_TypeError, "other argument must be K instance");
        return NULL;
    }

    PyObject *compare = ((keyobject *) ko)->cmp;
    assert(compare != NULL);
    PyObject *x = ((keyobject *) ko)->object;
    PyObject *y = ((keyobject *) other)->object;
    if (!x || !y){
        PyErr_Format(PyExc_AttributeError, "object");
        return NULL;
    }

    /* Call the user's comparison function and translate the 3-way
     * result into true or false (or error).
     */
    PyObject* args[2] = {x, y};
    PyObject *res = PyObject_Vectorcall(compare, args, 2, NULL);
    if (res == NULL) {
        return NULL;
    }

    PyObject *answer = PyObject_RichCompare(res, _PyLong_GetZero(), op);
    Py_DECREF(res);
    return answer;
}

/*[clinic input]
_functools.cmp_to_key

    mycmp: object
        Function that compares two objects.

Convert a cmp= function into a key= function.
[clinic start generated code]*/

static PyObject *
_functools_cmp_to_key_impl(PyObject *module, PyObject *mycmp)
/*[clinic end generated code: output=71eaad0f4fc81f33 input=d1b76f231c0dfeb3]*/
{
    keyobject *object;
    _functools_state *state;

    state = get_functools_state(module);
    object = PyObject_GC_New(keyobject, state->keyobject_type);
    if (!object)
        return NULL;
    object->cmp = Py_NewRef(mycmp);
    object->object = NULL;
    PyObject_GC_Track(object);
    return (PyObject *)object;
}

/* reduce (used to be a builtin) ********************************************/

// Not converted to argument clinic, because of `args` in-place modification.
// AC will affect performance.
static PyObject *
functools_reduce(PyObject *self, PyObject *args)
{
    PyObject *seq, *func, *result = NULL, *it;

    if (!PyArg_UnpackTuple(args, "reduce", 2, 3, &func, &seq, &result))
        return NULL;
    if (result != NULL)
        Py_INCREF(result);

    it = PyObject_GetIter(seq);
    if (it == NULL) {
        if (PyErr_ExceptionMatches(PyExc_TypeError))
            PyErr_SetString(PyExc_TypeError,
                            "reduce() arg 2 must support iteration");
        Py_XDECREF(result);
        return NULL;
    }

    if ((args = PyTuple_New(2)) == NULL)
        goto Fail;

    for (;;) {
        PyObject *op2;

        if (Py_REFCNT(args) > 1) {
            Py_DECREF(args);
            if ((args = PyTuple_New(2)) == NULL)
                goto Fail;
        }

        op2 = PyIter_Next(it);
        if (op2 == NULL) {
            if (PyErr_Occurred())
                goto Fail;
            break;
        }

        if (result == NULL)
            result = op2;
        else {
            /* Update the args tuple in-place */
            assert(Py_REFCNT(args) == 1);
            Py_XSETREF(_PyTuple_ITEMS(args)[0], result);
            Py_XSETREF(_PyTuple_ITEMS(args)[1], op2);
            if ((result = PyObject_Call(func, args, NULL)) == NULL) {
                goto Fail;
            }
            // bpo-42536: The GC may have untracked this args tuple. Since we're
            // recycling it, make sure it's tracked again:
            if (!_PyObject_GC_IS_TRACKED(args)) {
                _PyObject_GC_TRACK(args);
            }
        }
    }

    Py_DECREF(args);

    if (result == NULL)
        PyErr_SetString(PyExc_TypeError,
                        "reduce() of empty iterable with no initial value");

    Py_DECREF(it);
    return result;

Fail:
    Py_XDECREF(args);
    Py_XDECREF(result);
    Py_DECREF(it);
    return NULL;
}

PyDoc_STRVAR(functools_reduce_doc,
"reduce(function, iterable[, initial], /) -> value\n\
\n\
Apply a function of two arguments cumulatively to the items of an iterable, from left to right.\n\
\n\
This effectively reduces the iterable to a single value.  If initial is present,\n\
it is placed before the items of the iterable in the calculation, and serves as\n\
a default when the iterable is empty.\n\
\n\
For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5])\n\
calculates ((((1 + 2) + 3) + 4) + 5).");

/* lru_cache object **********************************************************/

/* There are four principal algorithmic differences from the pure python version:

   1). The C version relies on the GIL instead of having its own reentrant lock.

   2). The prev/next link fields use borrowed references.

   3). For a full cache, the pure python version rotates the location of the
       root entry so that it never has to move individual links and it can
       limit updates to just the key and result fields.  However, in the C
       version, links are temporarily removed while the cache dict updates are
       occurring.  Afterwards, they are appended or prepended back into the
       doubly-linked lists.

   4)  In the Python version, the _HashSeq class is used to prevent __hash__
       from being called more than once.  In the C version, the "known hash"
       variants of dictionary calls are used to the same effect.

*/

struct lru_list_elem;
struct lru_cache_object;

typedef struct lru_list_elem {
    PyObject_HEAD
    struct lru_list_elem *prev, *next;  /* borrowed links */
    Py_hash_t hash;
    PyObject *key, *result;
} lru_list_elem;
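
/* The links form a circular doubly-linked list threaded through the
 * lru_cache_object's embedded "root" sentinel: root.next is the least
 * recently used entry (the eviction candidate) and root.prev is the most
 * recently used one; an empty cache has root.next == root.prev == &root. */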

static void
lru_list_elem_dealloc(lru_list_elem *link)
{
    PyTypeObject *tp = Py_TYPE(link);
    Py_XDECREF(link->key);
    Py_XDECREF(link->result);
    tp->tp_free(link);
    Py_DECREF(tp);
}

static PyType_Slot lru_list_elem_type_slots[] = {
    {Py_tp_dealloc, lru_list_elem_dealloc},
    {0, 0}
};

static PyType_Spec lru_list_elem_type_spec = {
    .name = "functools._lru_list_elem",
    .basicsize = sizeof(lru_list_elem),
    .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION |
             Py_TPFLAGS_IMMUTABLETYPE,
    .slots = lru_list_elem_type_slots
};


typedef PyObject *(*lru_cache_ternaryfunc)(struct lru_cache_object *, PyObject *, PyObject *);

typedef struct lru_cache_object {
    lru_list_elem root;  /* includes PyObject_HEAD */
    lru_cache_ternaryfunc wrapper;
    int typed;
    PyObject *cache;
    Py_ssize_t hits;
    PyObject *func;
    Py_ssize_t maxsize;
    Py_ssize_t misses;
    /* the kwd_mark is used to delimit args and keywords in the cache keys */
    PyObject *kwd_mark;
    PyTypeObject *lru_list_elem_type;
    PyObject *cache_info_type;
    PyObject *dict;
    PyObject *weakreflist;
} lru_cache_object;

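/* Cache key layout produced by lru_cache_make_key(), illustrated with a
 * hypothetical call f(1, 2, a=3) (example not taken from this file):
 *
 *     typed=0:  (1, 2, kwd_mark, 'a', 3)
 *     typed=1:  (1, 2, kwd_mark, 'a', 3, int, int, int)
 *
 * i.e. positional values, then kwd_mark plus alternating keyword
 * names/values (only if keywords are present), then, for typed caches,
 * the types of every positional and keyword value. */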
static PyObject *
lru_cache_make_key(PyObject *kwd_mark, PyObject *args,
                   PyObject *kwds, int typed)
{
    PyObject *key, *keyword, *value;
    Py_ssize_t key_size, pos, key_pos, kwds_size;

    kwds_size = kwds ? PyDict_GET_SIZE(kwds) : 0;

    /* short path, key will match args anyway, which is a tuple */
    if (!typed && !kwds_size) {
        if (PyTuple_GET_SIZE(args) == 1) {
            key = PyTuple_GET_ITEM(args, 0);
            if (PyUnicode_CheckExact(key) || PyLong_CheckExact(key)) {
                /* For common scalar keys, save space by
                   dropping the enclosing args tuple */
                return Py_NewRef(key);
            }
        }
        return Py_NewRef(args);
    }

    key_size = PyTuple_GET_SIZE(args);
    if (kwds_size)
        key_size += kwds_size * 2 + 1;
    if (typed)
        key_size += PyTuple_GET_SIZE(args) + kwds_size;

    key = PyTuple_New(key_size);
    if (key == NULL)
        return NULL;

    key_pos = 0;
    for (pos = 0; pos < PyTuple_GET_SIZE(args); ++pos) {
        PyObject *item = PyTuple_GET_ITEM(args, pos);
        PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(item));
    }
    if (kwds_size) {
        PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(kwd_mark));
        for (pos = 0; PyDict_Next(kwds, &pos, &keyword, &value);) {
            PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(keyword));
            PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(value));
        }
        assert(key_pos == PyTuple_GET_SIZE(args) + kwds_size * 2 + 1);
    }
    if (typed) {
        for (pos = 0; pos < PyTuple_GET_SIZE(args); ++pos) {
            PyObject *item = (PyObject *)Py_TYPE(PyTuple_GET_ITEM(args, pos));
            PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(item));
        }
        if (kwds_size) {
            for (pos = 0; PyDict_Next(kwds, &pos, &keyword, &value);) {
                PyObject *item = (PyObject *)Py_TYPE(value);
                PyTuple_SET_ITEM(key, key_pos++, Py_NewRef(item));
            }
        }
    }
    assert(key_pos == key_size);
    return key;
}

static PyObject *
uncached_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
{
    PyObject *result;

    self->misses++;
    result = PyObject_Call(self->func, args, kwds);
    if (!result)
        return NULL;
    return result;
}

static PyObject *
infinite_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
{
    PyObject *result;
    Py_hash_t hash;
    PyObject *key = lru_cache_make_key(self->kwd_mark, args, kwds, self->typed);
    if (!key)
        return NULL;
    hash = PyObject_Hash(key);
    if (hash == -1) {
        Py_DECREF(key);
        return NULL;
    }
    result = _PyDict_GetItem_KnownHash(self->cache, key, hash);
    if (result) {
        Py_INCREF(result);
        self->hits++;
        Py_DECREF(key);
        return result;
    }
    if (PyErr_Occurred()) {
        Py_DECREF(key);
        return NULL;
    }
    self->misses++;
    result = PyObject_Call(self->func, args, kwds);
    if (!result) {
        Py_DECREF(key);
        return NULL;
    }
    if (_PyDict_SetItem_KnownHash(self->cache, key, result, hash) < 0) {
        Py_DECREF(result);
        Py_DECREF(key);
        return NULL;
    }
    Py_DECREF(key);
    return result;
}

static void
lru_cache_extract_link(lru_list_elem *link)
{
    lru_list_elem *link_prev = link->prev;
    lru_list_elem *link_next = link->next;
    link_prev->next = link->next;
    link_next->prev = link->prev;
}

static void
lru_cache_append_link(lru_cache_object *self, lru_list_elem *link)
{
    lru_list_elem *root = &self->root;
    lru_list_elem *last = root->prev;
    last->next = root->prev = link;
    link->prev = last;
    link->next = root;
}

static void
lru_cache_prepend_link(lru_cache_object *self, lru_list_elem *link)
{
    lru_list_elem *root = &self->root;
    lru_list_elem *first = root->next;
    first->prev = root->next = link;
    link->prev = root;
    link->next = first;
}

/* General note on reentrancy:

   There are four dictionary calls in the bounded_lru_cache_wrapper():
   1) The initial check for a cache match.  2) The post user-function
   check for a cache match.  3) The deletion of the oldest entry.
   4) The addition of the newest entry.

   In all four calls, we have a known hash which lets us avoid a call
   to __hash__().  That leaves only __eq__ as a possible source of a
   reentrant call.

   The __eq__ method call is always made for a cache hit (dict access #1).
   Accordingly, we have to make sure not to modify the cache state prior to
   this call.

   The __eq__ method call is never made for the deletion (dict access #3)
   because it is an identity match.

   For the other two accesses (#2 and #4), calls to __eq__ only occur
   when some other entry happens to have an exactly matching hash (all
   64-bits).  Though rare, this can happen, so we have to make sure to
   either call it at the top of its code path before any cache
   state modifications (dict access #2) or be prepared to restore
   invariants at the end of the code path (dict access #4).

   Another possible source of reentrancy is a decref which can trigger
   arbitrary code execution.  To make the code easier to reason about,
   the decrefs are deferred to the end of each possible code path
   so that we know the cache is in a consistent state.
 */

static PyObject *
bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
{
    lru_list_elem *link;
    PyObject *key, *result, *testresult;
    Py_hash_t hash;

    key = lru_cache_make_key(self->kwd_mark, args, kwds, self->typed);
    if (!key)
        return NULL;
    hash = PyObject_Hash(key);
    if (hash == -1) {
        Py_DECREF(key);
        return NULL;
    }
    link = (lru_list_elem *)_PyDict_GetItem_KnownHash(self->cache, key, hash);
    if (link != NULL) {
        lru_cache_extract_link(link);
        lru_cache_append_link(self, link);
        result = link->result;
        self->hits++;
        Py_INCREF(result);
        Py_DECREF(key);
        return result;
    }
    if (PyErr_Occurred()) {
        Py_DECREF(key);
        return NULL;
    }
    self->misses++;
    result = PyObject_Call(self->func, args, kwds);
    if (!result) {
        Py_DECREF(key);
        return NULL;
    }
    testresult = _PyDict_GetItem_KnownHash(self->cache, key, hash);
    if (testresult != NULL) {
        /* Getting here means that this same key was added to the cache
           during the PyObject_Call().  Since the link update is already
           done, we need only return the computed result. */
        Py_DECREF(key);
        return result;
    }
    if (PyErr_Occurred()) {
        /* This is an unusual case since this same lookup
           did not previously trigger an error during lookup.
           Treat it the same as an error in user function
           and return with the error set. */
        Py_DECREF(key);
        Py_DECREF(result);
        return NULL;
    }
    /* This is the normal case.  The new key wasn't found before
       user function call and it is still not there.  So we
       proceed normally and update the cache with the new result. */

    assert(self->maxsize > 0);
    if (PyDict_GET_SIZE(self->cache) < self->maxsize ||
        self->root.next == &self->root)
    {
        /* Cache is not full, so put the result in a new link */
        link = (lru_list_elem *)PyObject_New(lru_list_elem,
                                             self->lru_list_elem_type);
        if (link == NULL) {
            Py_DECREF(key);
            Py_DECREF(result);
            return NULL;
        }

        link->hash = hash;
        link->key = key;
        link->result = result;
        /* What is really needed here is a SetItem variant with a "no clobber"
           option.  If the __eq__ call triggers a reentrant call that adds
           this same key, then this setitem call will update the cache dict
           with this new link, leaving the old link as an orphan (i.e. not
           having a cache dict entry that refers to it). */
        if (_PyDict_SetItem_KnownHash(self->cache, key, (PyObject *)link,
                                      hash) < 0) {
            Py_DECREF(link);
            return NULL;
        }
        lru_cache_append_link(self, link);
        return Py_NewRef(result);
    }
    /* Since the cache is full, we need to evict an old key and add
       a new key.  Rather than free the old link and allocate a new
       one, we reuse the link for the new key and result and move it
       to front of the cache to mark it as recently used.

       We try to assure all code paths (including errors) leave all
       of the links in place.  Either the link is successfully
       updated and moved or it is restored to its old position.
       However if an unrecoverable error is found, it doesn't
       make sense to reinsert the link, so we leave it out
       and the cache will no longer register as full.
    */
    PyObject *oldkey, *oldresult, *popresult;

    /* Extract the oldest item. */
    assert(self->root.next != &self->root);
    link = self->root.next;
    lru_cache_extract_link(link);
    /* Remove it from the cache.
       The cache dict holds one reference to the link.
       We created one other reference when the link was created.
       The linked list only has borrowed references. */
    int res = _PyDict_Pop_KnownHash((PyDictObject*)self->cache, link->key,
                                    link->hash, &popresult);
    if (res < 0) {
        /* An error arose while trying to remove the oldest key (the one
           being evicted) from the cache.  We restore the link to its
           original position as the oldest link.  Then we allow the
           error to propagate upward, treating it the same as an error
           arising in the user function. */
        lru_cache_prepend_link(self, link);
        Py_DECREF(key);
        Py_DECREF(result);
        return NULL;
    }
    if (res == 0) {
        /* Getting here means that the user function call or another
           thread has already removed the old key from the dictionary.
           This link is now an orphan.  Since we don't want to leave the
           cache in an inconsistent state, we don't restore the link. */
        assert(popresult == NULL);
        Py_DECREF(link);
        Py_DECREF(key);
        return result;
    }

    /* Keep a reference to the old key and old result to prevent their
       ref counts from going to zero during the update.  That will
       prevent potentially arbitrary object clean-up code (i.e. __del__)
       from running while we're still adjusting the links. */
    assert(popresult != NULL);
    oldkey = link->key;
    oldresult = link->result;

    link->hash = hash;
    link->key = key;
    link->result = result;
    /* Note:  The link is being added to the cache dict without the
       prev and next fields set to valid values.  We have to wait
       for successful insertion in the cache dict before adding the
       link to the linked list.  Otherwise, the potentially reentrant
       __eq__ call could cause the then orphan link to be visited. */
    if (_PyDict_SetItem_KnownHash(self->cache, key, (PyObject *)link,
                                  hash) < 0) {
        /* Somehow the cache dict update failed.  We no longer can
           restore the old link.  Let the error propagate upward and
           leave the cache short one link. */
        Py_DECREF(popresult);
        Py_DECREF(link);
        Py_DECREF(oldkey);
        Py_DECREF(oldresult);
        return NULL;
    }
    lru_cache_append_link(self, link);
    Py_INCREF(result); /* for return */
    Py_DECREF(popresult);
    Py_DECREF(oldkey);
    Py_DECREF(oldresult);
    return result;
}

static PyObject *
lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw)
{
    PyObject *func, *maxsize_O, *cache_info_type, *cachedict;
    int typed;
    lru_cache_object *obj;
    Py_ssize_t maxsize;
    PyObject *(*wrapper)(lru_cache_object *, PyObject *, PyObject *);
    _functools_state *state;
    static char *keywords[] = {"user_function", "maxsize", "typed",
                               "cache_info_type", NULL};

    if (!PyArg_ParseTupleAndKeywords(args, kw, "OOpO:lru_cache", keywords,
                                     &func, &maxsize_O, &typed,
                                     &cache_info_type)) {
        return NULL;
    }

    if (!PyCallable_Check(func)) {
        PyErr_SetString(PyExc_TypeError,
                        "the first argument must be callable");
        return NULL;
    }

    state = get_functools_state_by_type(type);
    if (state == NULL) {
        return NULL;
    }

    /* select the caching function, and make/inc maxsize_O */
    if (maxsize_O == Py_None) {
        wrapper = infinite_lru_cache_wrapper;
        /* use this only to initialize lru_cache_object attribute maxsize */
        maxsize = -1;
    } else if (PyIndex_Check(maxsize_O)) {
        maxsize = PyNumber_AsSsize_t(maxsize_O, PyExc_OverflowError);
        if (maxsize == -1 && PyErr_Occurred())
            return NULL;
        if (maxsize < 0) {
            maxsize = 0;
        }
        if (maxsize == 0)
            wrapper = uncached_lru_cache_wrapper;
        else
            wrapper = bounded_lru_cache_wrapper;
    } else {
        PyErr_SetString(PyExc_TypeError, "maxsize should be integer or None");
        return NULL;
    }

    if (!(cachedict = PyDict_New()))
        return NULL;

    obj = (lru_cache_object *)type->tp_alloc(type, 0);
    if (obj == NULL) {
        Py_DECREF(cachedict);
        return NULL;
    }

    obj->root.prev = &obj->root;
    obj->root.next = &obj->root;
    obj->wrapper = wrapper;
    obj->typed = typed;
    obj->cache = cachedict;
    obj->func = Py_NewRef(func);
    obj->misses = obj->hits = 0;
    obj->maxsize = maxsize;
    obj->kwd_mark = Py_NewRef(state->kwd_mark);
    obj->lru_list_elem_type = (PyTypeObject*)Py_NewRef(state->lru_list_elem_type);
    obj->cache_info_type = Py_NewRef(cache_info_type);
    obj->dict = NULL;
    obj->weakreflist = NULL;
    return (PyObject *)obj;
}

static lru_list_elem *
lru_cache_unlink_list(lru_cache_object *self)
{
    lru_list_elem *root = &self->root;
    lru_list_elem *link = root->next;
    if (link == root)
        return NULL;
    root->prev->next = NULL;
    root->next = root->prev = root;
    return link;
}

static void
lru_cache_clear_list(lru_list_elem *link)
{
    while (link != NULL) {
        lru_list_elem *next = link->next;
        Py_SETREF(link, next);
    }
}

static int
lru_cache_tp_clear(lru_cache_object *self)
{
    lru_list_elem *list = lru_cache_unlink_list(self);
    Py_CLEAR(self->cache);
    Py_CLEAR(self->func);
    Py_CLEAR(self->kwd_mark);
    Py_CLEAR(self->lru_list_elem_type);
    Py_CLEAR(self->cache_info_type);
    Py_CLEAR(self->dict);
    lru_cache_clear_list(list);
    return 0;
}

static void
lru_cache_dealloc(lru_cache_object *obj)
{
    PyTypeObject *tp = Py_TYPE(obj);
    /* bpo-31095: UnTrack is needed before calling any callbacks */
    PyObject_GC_UnTrack(obj);
    if (obj->weakreflist != NULL) {
        PyObject_ClearWeakRefs((PyObject*)obj);
    }

    (void)lru_cache_tp_clear(obj);
    tp->tp_free(obj);
    Py_DECREF(tp);
}

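/* Py_BEGIN/END_CRITICAL_SECTION below is a no-op on GIL-enabled builds; on
 * free-threaded builds it acquires the cache object's per-object lock around
 * the wrapper call so that the dict and linked-list updates above are not
 * interleaved with other threads. */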
static PyObject *
lru_cache_call(lru_cache_object *self, PyObject *args, PyObject *kwds)
{
    PyObject *result;
    Py_BEGIN_CRITICAL_SECTION(self);
    result = self->wrapper(self, args, kwds);
    Py_END_CRITICAL_SECTION();
    return result;
}

static PyObject *
lru_cache_descr_get(PyObject *self, PyObject *obj, PyObject *type)
{
    if (obj == Py_None || obj == NULL) {
        return Py_NewRef(self);
    }
    return PyMethod_New(self, obj);
}

/*[clinic input]
@critical_section
_functools._lru_cache_wrapper.cache_info

Report cache statistics
[clinic start generated code]*/

static PyObject *
_functools__lru_cache_wrapper_cache_info_impl(PyObject *self)
/*[clinic end generated code: output=cc796a0b06dbd717 input=00e1acb31aa21ecc]*/
{
    lru_cache_object *_self = (lru_cache_object *) self;
    if (_self->maxsize == -1) {
        return PyObject_CallFunction(_self->cache_info_type, "nnOn",
                                     _self->hits, _self->misses, Py_None,
                                     PyDict_GET_SIZE(_self->cache));
    }
    return PyObject_CallFunction(_self->cache_info_type, "nnnn",
                                 _self->hits, _self->misses, _self->maxsize,
                                 PyDict_GET_SIZE(_self->cache));
}

/*[clinic input]
@critical_section
_functools._lru_cache_wrapper.cache_clear

Clear the cache and cache statistics
[clinic start generated code]*/

static PyObject *
_functools__lru_cache_wrapper_cache_clear_impl(PyObject *self)
/*[clinic end generated code: output=58423b35efc3e381 input=dfa33acbecf8b4b2]*/
{
    lru_cache_object *_self = (lru_cache_object *) self;
    lru_list_elem *list = lru_cache_unlink_list(_self);
    _self->hits = _self->misses = 0;
    PyDict_Clear(_self->cache);
    lru_cache_clear_list(list);
    Py_RETURN_NONE;
}

static PyObject *
lru_cache_reduce(PyObject *self, PyObject *unused)
{
    return PyObject_GetAttrString(self, "__qualname__");
}

static PyObject *
lru_cache_copy(PyObject *self, PyObject *unused)
{
    return Py_NewRef(self);
}

static PyObject *
lru_cache_deepcopy(PyObject *self, PyObject *unused)
{
    return Py_NewRef(self);
}

static int
lru_cache_tp_traverse(lru_cache_object *self, visitproc visit, void *arg)
{
    Py_VISIT(Py_TYPE(self));
    lru_list_elem *link = self->root.next;
    while (link != &self->root) {
        lru_list_elem *next = link->next;
        Py_VISIT(link->key);
        Py_VISIT(link->result);
        Py_VISIT(Py_TYPE(link));
        link = next;
    }
    Py_VISIT(self->cache);
    Py_VISIT(self->func);
    Py_VISIT(self->kwd_mark);
    Py_VISIT(self->lru_list_elem_type);
    Py_VISIT(self->cache_info_type);
    Py_VISIT(self->dict);
    return 0;
}


PyDoc_STRVAR(lru_cache_doc,
"Create a cached callable that wraps another function.\n\
\n\
user_function:      the function being cached\n\
\n\
maxsize:  0         for no caching\n\
          None      for unlimited cache size\n\
          n         for a bounded cache\n\
\n\
typed:    False     cache f(3) and f(3.0) as identical calls\n\
          True      cache f(3) and f(3.0) as distinct calls\n\
\n\
cache_info_type:    namedtuple class with the fields:\n\
                        hits misses currsize maxsize\n"
);
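
/* The Python-level functools.lru_cache decorator (in Lib/functools.py) is the
 * usual producer of these arguments: it passes the user function, maxsize,
 * typed, and its _CacheInfo namedtuple class as cache_info_type, then applies
 * functools.update_wrapper to the resulting wrapper object. */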

static PyMethodDef lru_cache_methods[] = {
    _FUNCTOOLS__LRU_CACHE_WRAPPER_CACHE_INFO_METHODDEF
    _FUNCTOOLS__LRU_CACHE_WRAPPER_CACHE_CLEAR_METHODDEF
    {"__reduce__", (PyCFunction)lru_cache_reduce, METH_NOARGS},
    {"__copy__", (PyCFunction)lru_cache_copy, METH_VARARGS},
    {"__deepcopy__", (PyCFunction)lru_cache_deepcopy, METH_VARARGS},
    {NULL}
};

static PyGetSetDef lru_cache_getsetlist[] = {
    {"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict},
    {NULL}
};

static PyMemberDef lru_cache_memberlist[] = {
    {"__dictoffset__", Py_T_PYSSIZET,
     offsetof(lru_cache_object, dict), Py_READONLY},
    {"__weaklistoffset__", Py_T_PYSSIZET,
     offsetof(lru_cache_object, weakreflist), Py_READONLY},
    {NULL} /* Sentinel */
};

static PyType_Slot lru_cache_type_slots[] = {
    {Py_tp_dealloc, lru_cache_dealloc},
    {Py_tp_call, lru_cache_call},
    {Py_tp_doc, (void *)lru_cache_doc},
    {Py_tp_traverse, lru_cache_tp_traverse},
    {Py_tp_clear, lru_cache_tp_clear},
    {Py_tp_methods, lru_cache_methods},
    {Py_tp_members, lru_cache_memberlist},
    {Py_tp_getset, lru_cache_getsetlist},
    {Py_tp_descr_get, lru_cache_descr_get},
    {Py_tp_new, lru_cache_new},
    {0, 0}
};

static PyType_Spec lru_cache_type_spec = {
    .name = "functools._lru_cache_wrapper",
    .basicsize = sizeof(lru_cache_object),
    .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
             Py_TPFLAGS_METHOD_DESCRIPTOR | Py_TPFLAGS_IMMUTABLETYPE,
    .slots = lru_cache_type_slots
};


/* module level code ********************************************************/

PyDoc_STRVAR(_functools_doc,
"Tools that operate on functions.");

static PyMethodDef _functools_methods[] = {
    {"reduce", functools_reduce, METH_VARARGS, functools_reduce_doc},
    _FUNCTOOLS_CMP_TO_KEY_METHODDEF
    {NULL, NULL} /* sentinel */
};

static int
_functools_exec(PyObject *module)
{
    _functools_state *state = get_functools_state(module);
    state->kwd_mark = _PyObject_CallNoArgs((PyObject *)&PyBaseObject_Type);
    if (state->kwd_mark == NULL) {
        return -1;
    }

    state->partial_type = (PyTypeObject *)PyType_FromModuleAndSpec(module,
        &partial_type_spec, NULL);
    if (state->partial_type == NULL) {
        return -1;
    }
    if (PyModule_AddType(module, state->partial_type) < 0) {
        return -1;
    }

    PyObject *lru_cache_type = PyType_FromModuleAndSpec(module,
        &lru_cache_type_spec, NULL);
    if (lru_cache_type == NULL) {
        return -1;
    }
    if (PyModule_AddType(module, (PyTypeObject *)lru_cache_type) < 0) {
        Py_DECREF(lru_cache_type);
        return -1;
    }
    Py_DECREF(lru_cache_type);

    state->keyobject_type = (PyTypeObject *)PyType_FromModuleAndSpec(module,
        &keyobject_type_spec, NULL);
    if (state->keyobject_type == NULL) {
        return -1;
    }
    // keyobject_type is used only internally.
    // So we don't expose it in module namespace.

    state->lru_list_elem_type = (PyTypeObject *)PyType_FromModuleAndSpec(
        module, &lru_list_elem_type_spec, NULL);
    if (state->lru_list_elem_type == NULL) {
        return -1;
    }
    // lru_list_elem is used only in _lru_cache_wrapper.
    // So we don't expose it in module namespace.

    return 0;
}

static int
_functools_traverse(PyObject *module, visitproc visit, void *arg)
{
    _functools_state *state = get_functools_state(module);
    Py_VISIT(state->kwd_mark);
    Py_VISIT(state->partial_type);
    Py_VISIT(state->keyobject_type);
    Py_VISIT(state->lru_list_elem_type);
    return 0;
}

static int
_functools_clear(PyObject *module)
{
    _functools_state *state = get_functools_state(module);
    Py_CLEAR(state->kwd_mark);
    Py_CLEAR(state->partial_type);
    Py_CLEAR(state->keyobject_type);
    Py_CLEAR(state->lru_list_elem_type);
    return 0;
}

static void
_functools_free(void *module)
{
    _functools_clear((PyObject *)module);
}

static struct PyModuleDef_Slot _functools_slots[] = {
    {Py_mod_exec, _functools_exec},
    {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED},
    {Py_mod_gil, Py_MOD_GIL_NOT_USED},
    {0, NULL}
};

static struct PyModuleDef _functools_module = {
    PyModuleDef_HEAD_INIT,
    .m_name = "_functools",
    .m_doc = _functools_doc,
    .m_size = sizeof(_functools_state),
    .m_methods = _functools_methods,
    .m_slots = _functools_slots,
    .m_traverse = _functools_traverse,
    .m_clear = _functools_clear,
    .m_free = _functools_free,
};

PyMODINIT_FUNC
PyInit__functools(void)
{
    return PyModuleDef_Init(&_functools_module);
}