/* Frame object implementation */

#include "Python.h"
#include "pycore_ceval.h"         // _PyEval_BuiltinsFromGlobals()
#include "pycore_moduleobject.h"  // _PyModule_GetDict()
#include "pycore_object.h"        // _PyObject_GC_UNTRACK()

#include "frameobject.h"          // PyFrameObject
#include "opcode.h"               // EXTENDED_ARG
#include "structmember.h"         // PyMemberDef

#define OFF(x) offsetof(PyFrameObject, x)

static PyMemberDef frame_memberlist[] = {
    {"f_back",          T_OBJECT,   OFF(f_back),          READONLY},
    {"f_code",          T_OBJECT,   OFF(f_code),          READONLY|PY_AUDIT_READ},
    {"f_builtins",      T_OBJECT,   OFF(f_builtins),      READONLY},
    {"f_globals",       T_OBJECT,   OFF(f_globals),       READONLY},
    {"f_trace_lines",   T_BOOL,     OFF(f_trace_lines),   0},
    {"f_trace_opcodes", T_BOOL,     OFF(f_trace_opcodes), 0},
    {NULL}      /* Sentinel */
};

static struct _Py_frame_state *
get_frame_state(void)
{
    PyInterpreterState *interp = _PyInterpreterState_GET();
    return &interp->frame;
}


static PyObject *
frame_getlocals(PyFrameObject *f, void *closure)
{
    if (PyFrame_FastToLocalsWithError(f) < 0)
        return NULL;
    Py_INCREF(f->f_locals);
    return f->f_locals;
}

int
PyFrame_GetLineNumber(PyFrameObject *f)
{
    assert(f != NULL);
    if (f->f_lineno != 0) {
        return f->f_lineno;
    }
    else {
        return PyCode_Addr2Line(f->f_code, f->f_lasti*sizeof(_Py_CODEUNIT));
    }
}

static PyObject *
frame_getlineno(PyFrameObject *f, void *closure)
{
    int lineno = PyFrame_GetLineNumber(f);
    if (lineno < 0) {
        Py_RETURN_NONE;
    }
    else {
        return PyLong_FromLong(lineno);
    }
}

static PyObject *
frame_getlasti(PyFrameObject *f, void *closure)
{
    if (f->f_lasti < 0) {
        return PyLong_FromLong(-1);
    }
    return PyLong_FromLong(f->f_lasti*sizeof(_Py_CODEUNIT));
}


/* Given the index of the effective opcode,
   scan back to construct the oparg with EXTENDED_ARG */
static unsigned int
get_arg(const _Py_CODEUNIT *codestr, Py_ssize_t i)
{
    _Py_CODEUNIT word;
    unsigned int oparg = _Py_OPARG(codestr[i]);
    if (i >= 1 && _Py_OPCODE(word = codestr[i-1]) == EXTENDED_ARG) {
        oparg |= _Py_OPARG(word) << 8;
        if (i >= 2 && _Py_OPCODE(word = codestr[i-2]) == EXTENDED_ARG) {
            oparg |= _Py_OPARG(word) << 16;
            if (i >= 3 && _Py_OPCODE(word = codestr[i-3]) == EXTENDED_ARG) {
                oparg |= _Py_OPARG(word) << 24;
            }
        }
    }
    return oparg;
}
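
/* Illustrative sketch (not part of the implementation): with 16-bit code
 * units, an oparg larger than 255 is encoded as one or more EXTENDED_ARG
 * prefixes.  For example, the instruction sequence
 *
 *     EXTENDED_ARG  0x01
 *     EXTENDED_ARG  0x02
 *     JUMP_ABSOLUTE 0x03
 *
 * yields get_arg(codestr, i) == 0x010203 when i is the index of the
 * JUMP_ABSOLUTE word.
 */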

typedef enum kind {
    With = 1,
    Loop = 2,
    Try = 3,
    Except = 4,
} Kind;

#define BITS_PER_BLOCK 3

static inline int64_t
push_block(int64_t stack, Kind kind)
{
    assert(stack < ((int64_t)1)<<(BITS_PER_BLOCK*CO_MAXBLOCKS));
    return (stack << BITS_PER_BLOCK) | kind;
}

static inline int64_t
pop_block(int64_t stack)
{
    assert(stack > 0);
    return stack >> BITS_PER_BLOCK;
}

static inline Kind
top_block(int64_t stack)
{
    return stack & ((1<<BITS_PER_BLOCK)-1);
}
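
/* Illustrative sketch (not part of the implementation): the block stack is
 * packed into an int64_t, three bits per entry, innermost block in the low
 * bits.  Starting from an empty stack (0):
 *
 *     int64_t s = 0;
 *     s = push_block(s, Try);    // s == 3            stack: [Try]
 *     s = push_block(s, With);   // s == (3<<3) | 1   stack: [Try, With]
 *     top_block(s);              // With
 *     s = pop_block(s);          // s == 3            stack: [Try]
 */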

static int64_t *
markblocks(PyCodeObject *code_obj, int len)
{
    const _Py_CODEUNIT *code =
        (const _Py_CODEUNIT *)PyBytes_AS_STRING(code_obj->co_code);
    int64_t *blocks = PyMem_New(int64_t, len+1);
    int i, j, opcode;

    if (blocks == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    memset(blocks, -1, (len+1)*sizeof(int64_t));
    blocks[0] = 0;
    int todo = 1;
    while (todo) {
        todo = 0;
        for (i = 0; i < len; i++) {
            int64_t block_stack = blocks[i];
            int64_t except_stack;
            if (block_stack == -1) {
                continue;
            }
            opcode = _Py_OPCODE(code[i]);
            switch (opcode) {
                case JUMP_IF_FALSE_OR_POP:
                case JUMP_IF_TRUE_OR_POP:
                case POP_JUMP_IF_FALSE:
                case POP_JUMP_IF_TRUE:
                case JUMP_IF_NOT_EXC_MATCH:
                    j = get_arg(code, i);
                    assert(j < len);
                    if (blocks[j] == -1 && j < i) {
                        todo = 1;
                    }
                    assert(blocks[j] == -1 || blocks[j] == block_stack);
                    blocks[j] = block_stack;
                    blocks[i+1] = block_stack;
                    break;
                case JUMP_ABSOLUTE:
                    j = get_arg(code, i);
                    assert(j < len);
                    if (blocks[j] == -1 && j < i) {
                        todo = 1;
                    }
                    assert(blocks[j] == -1 || blocks[j] == block_stack);
                    blocks[j] = block_stack;
                    break;
                case SETUP_FINALLY:
                    j = get_arg(code, i) + i + 1;
                    assert(j < len);
                    except_stack = push_block(block_stack, Except);
                    assert(blocks[j] == -1 || blocks[j] == except_stack);
                    blocks[j] = except_stack;
                    block_stack = push_block(block_stack, Try);
                    blocks[i+1] = block_stack;
                    break;
                case SETUP_WITH:
                case SETUP_ASYNC_WITH:
                    j = get_arg(code, i) + i + 1;
                    assert(j < len);
                    except_stack = push_block(block_stack, Except);
                    assert(blocks[j] == -1 || blocks[j] == except_stack);
                    blocks[j] = except_stack;
                    block_stack = push_block(block_stack, With);
                    blocks[i+1] = block_stack;
                    break;
                case JUMP_FORWARD:
                    j = get_arg(code, i) + i + 1;
                    assert(j < len);
                    assert(blocks[j] == -1 || blocks[j] == block_stack);
                    blocks[j] = block_stack;
                    break;
                case GET_ITER:
                case GET_AITER:
                    block_stack = push_block(block_stack, Loop);
                    blocks[i+1] = block_stack;
                    break;
                case FOR_ITER:
                    blocks[i+1] = block_stack;
                    block_stack = pop_block(block_stack);
                    j = get_arg(code, i) + i + 1;
                    assert(j < len);
                    assert(blocks[j] == -1 || blocks[j] == block_stack);
                    blocks[j] = block_stack;
                    break;
                case POP_BLOCK:
                case POP_EXCEPT:
                    block_stack = pop_block(block_stack);
                    blocks[i+1] = block_stack;
                    break;
                case END_ASYNC_FOR:
                    block_stack = pop_block(pop_block(block_stack));
                    blocks[i+1] = block_stack;
                    break;
                case RETURN_VALUE:
                case RAISE_VARARGS:
                case RERAISE:
                    /* End of block */
                    break;
                default:
                    blocks[i+1] = block_stack;
            }
        }
    }
    return blocks;
}

static int
compatible_block_stack(int64_t from_stack, int64_t to_stack)
{
    if (to_stack < 0) {
        return 0;
    }
    while(from_stack > to_stack) {
        from_stack = pop_block(from_stack);
    }
    return from_stack == to_stack;
}
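
/* Illustrative sketch (not part of the implementation): a jump is allowed
 * only to a point whose block stack is a prefix of the current one, i.e.
 * blocks may be popped on the way out but never entered.  For example:
 *
 *     int64_t inner = push_block(push_block(0, Try), With);  // [Try, With]
 *     int64_t outer = push_block(0, Try);                    // [Try]
 *     compatible_block_stack(inner, outer);  // 1: the 'with' can be popped
 *     compatible_block_stack(outer, inner);  // 0: can't jump into the 'with'
 */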

static const char *
explain_incompatible_block_stack(int64_t to_stack)
{
    Kind target_kind = top_block(to_stack);
    switch(target_kind) {
        case Except:
            return "can't jump into an 'except' block as there's no exception";
        case Try:
            return "can't jump into the body of a try statement";
        case With:
            return "can't jump into the body of a with statement";
        case Loop:
            return "can't jump into the body of a for loop";
        default:
            Py_UNREACHABLE();
    }
}

static int *
marklines(PyCodeObject *code, int len)
{
    PyCodeAddressRange bounds;
    _PyCode_InitAddressRange(code, &bounds);
    assert (bounds.ar_end == 0);

    int *linestarts = PyMem_New(int, len);
    if (linestarts == NULL) {
        return NULL;
    }
    for (int i = 0; i < len; i++) {
        linestarts[i] = -1;
    }

    while (PyLineTable_NextAddressRange(&bounds)) {
        assert(bounds.ar_start/(int)sizeof(_Py_CODEUNIT) < len);
        linestarts[bounds.ar_start/sizeof(_Py_CODEUNIT)] = bounds.ar_line;
    }
    return linestarts;
}

static int
first_line_not_before(int *lines, int len, int line)
{
    int result = INT_MAX;
    for (int i = 0; i < len; i++) {
        if (lines[i] < result && lines[i] >= line) {
            result = lines[i];
        }
    }
    if (result == INT_MAX) {
        return -1;
    }
    return result;
}

static void
frame_stack_pop(PyFrameObject *f)
{
    assert(f->f_stackdepth > 0);
    f->f_stackdepth--;
    PyObject *v = f->f_valuestack[f->f_stackdepth];
    Py_DECREF(v);
}

static void
frame_block_unwind(PyFrameObject *f)
{
    assert(f->f_stackdepth >= 0);
    assert(f->f_iblock > 0);
    f->f_iblock--;
    PyTryBlock *b = &f->f_blockstack[f->f_iblock];
    intptr_t delta = f->f_stackdepth - b->b_level;
    while (delta > 0) {
        frame_stack_pop(f);
        delta--;
    }
}


/* Setter for f_lineno - you can set f_lineno from within a trace function in
 * order to jump to a given line of code, subject to some restrictions.  Most
 * lines are OK to jump to because they don't make any assumptions about the
 * state of the stack (obvious because you could remove the line and the code
 * would still work without any stack errors), but there are some constructs
 * that limit jumping:
 *
 *  o Lines with an 'except' statement on them can't be jumped to, because
 *    they expect an exception to be on the top of the stack.
 *  o Lines that live in a 'finally' block can't be jumped from or to, since
 *    we cannot be sure which state the interpreter was in or would be in
 *    during execution of the finally block.
 *  o 'try', 'with' and 'async with' blocks can't be jumped into because
 *    the blockstack needs to be set up before their code runs.
 *  o 'for' and 'async for' loops can't be jumped into because the
 *    iterator needs to be on the stack.
 *  o Jumps cannot be made from within a trace function invoked with a
 *    'return' or 'exception' event since the eval loop has been exited at
 *    that time.
 */
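
/* Python-level illustration (not part of the implementation): pdb's 'jump'
 * command relies on this setter.  Roughly, inside a trace function invoked
 * for a 'line' event one can write
 *
 *     frame.f_lineno = target_line
 *
 * (target_line being whatever line the user asked for) and execution resumes
 * at the first instruction of that line, provided the jump does not violate
 * the restrictions listed above.
 */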
static int
frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignored))
{
    if (p_new_lineno == NULL) {
        PyErr_SetString(PyExc_AttributeError, "cannot delete attribute");
        return -1;
    }
    /* f_lineno must be an integer. */
    if (!PyLong_CheckExact(p_new_lineno)) {
        PyErr_SetString(PyExc_ValueError,
                        "lineno must be an integer");
        return -1;
    }

    /*
     * This code preserves the historical restrictions on
     * setting the line number of a frame.
     * Jumps are forbidden on a 'return' trace event (except after a yield).
     * Jumps from 'call' trace events are also forbidden.
     * In addition, jumps are forbidden when not tracing,
     * as this is a debugging feature.
     */
    switch(f->f_state) {
        case FRAME_CREATED:
            PyErr_Format(PyExc_ValueError,
                "can't jump from the 'call' trace event of a new frame");
            return -1;
        case FRAME_RETURNED:
        case FRAME_UNWINDING:
        case FRAME_RAISED:
        case FRAME_CLEARED:
            PyErr_SetString(PyExc_ValueError,
                "can only jump from a 'line' trace event");
            return -1;
        case FRAME_EXECUTING:
        case FRAME_SUSPENDED:
            /* You can only do this from within a trace function, not via
             * _getframe or similar hackery. */
            if (!f->f_trace) {
                PyErr_Format(PyExc_ValueError,
                             "f_lineno can only be set by a trace function");
                return -1;
            }
            break;
    }

    int new_lineno;

    /* Fail if the line falls outside the code block and
       select first line with actual code. */
    int overflow;
    long l_new_lineno = PyLong_AsLongAndOverflow(p_new_lineno, &overflow);
    if (overflow
#if SIZEOF_LONG > SIZEOF_INT
        || l_new_lineno > INT_MAX
        || l_new_lineno < INT_MIN
#endif
    ) {
        PyErr_SetString(PyExc_ValueError,
                        "lineno out of range");
        return -1;
    }
    new_lineno = (int)l_new_lineno;

    if (new_lineno < f->f_code->co_firstlineno) {
        PyErr_Format(PyExc_ValueError,
                     "line %d comes before the current code block",
                     new_lineno);
        return -1;
    }

    /* PyCode_NewWithPosOnlyArgs limits co_code to be under INT_MAX so this
     * should never overflow. */
    int len = (int)(PyBytes_GET_SIZE(f->f_code->co_code) / sizeof(_Py_CODEUNIT));
    int *lines = marklines(f->f_code, len);
    if (lines == NULL) {
        return -1;
    }

    new_lineno = first_line_not_before(lines, len, new_lineno);
    if (new_lineno < 0) {
        PyErr_Format(PyExc_ValueError,
                     "line %d comes after the current code block",
                     (int)l_new_lineno);
        PyMem_Free(lines);
        return -1;
    }

    int64_t *blocks = markblocks(f->f_code, len);
    if (blocks == NULL) {
        PyMem_Free(lines);
        return -1;
    }

    int64_t target_block_stack = -1;
    int64_t best_block_stack = -1;
    int best_addr = -1;
    int64_t start_block_stack = blocks[f->f_lasti];
    const char *msg = "cannot find bytecode for specified line";
    for (int i = 0; i < len; i++) {
        if (lines[i] == new_lineno) {
            target_block_stack = blocks[i];
            if (compatible_block_stack(start_block_stack, target_block_stack)) {
                msg = NULL;
                if (target_block_stack > best_block_stack) {
                    best_block_stack = target_block_stack;
                    best_addr = i;
                }
            }
            else if (msg) {
                if (target_block_stack >= 0) {
                    msg = explain_incompatible_block_stack(target_block_stack);
                }
                else {
                    msg = "code may be unreachable.";
                }
            }
        }
    }
    PyMem_Free(blocks);
    PyMem_Free(lines);
    if (msg != NULL) {
        PyErr_SetString(PyExc_ValueError, msg);
        return -1;
    }

    /* Unwind block stack. */
    while (start_block_stack > best_block_stack) {
        Kind kind = top_block(start_block_stack);
        switch(kind) {
            case Loop:
                frame_stack_pop(f);
                break;
            case Try:
                frame_block_unwind(f);
                break;
            case With:
                frame_block_unwind(f);
                // Pop the exit function
                frame_stack_pop(f);
                break;
            case Except:
                PyErr_SetString(PyExc_ValueError,
                    "can't jump out of an 'except' block");
                return -1;
        }
        start_block_stack = pop_block(start_block_stack);
    }

    /* Finally set the new f_lasti and return OK. */
    f->f_lineno = 0;
    f->f_lasti = best_addr;
    return 0;
}

static PyObject *
frame_gettrace(PyFrameObject *f, void *closure)
{
    PyObject* trace = f->f_trace;

    if (trace == NULL)
        trace = Py_None;

    Py_INCREF(trace);

    return trace;
}

static int
frame_settrace(PyFrameObject *f, PyObject* v, void *closure)
{
    if (v == Py_None) {
        v = NULL;
    }
    Py_XINCREF(v);
    Py_XSETREF(f->f_trace, v);

    return 0;
}


static PyGetSetDef frame_getsetlist[] = {
    {"f_locals",   (getter)frame_getlocals, NULL, NULL},
    {"f_lineno",   (getter)frame_getlineno,
                   (setter)frame_setlineno, NULL},
    {"f_trace",    (getter)frame_gettrace, (setter)frame_settrace, NULL},
    {"f_lasti",    (getter)frame_getlasti, NULL, NULL},
    {0}
};

/* Stack frames are allocated and deallocated at a considerable rate.
   In an attempt to improve the speed of function calls, we:

   1. Hold a single "zombie" frame on each code object. This retains
   the allocated and initialised frame object from an invocation of
   the code object. The zombie is reanimated the next time we need a
   frame object for that code object. Doing this saves the malloc/
   realloc required when using a free_list frame that isn't the
   correct size. It also saves some field initialisation.

   In zombie mode, no field of PyFrameObject holds a reference, but
   the following fields are still valid:

     * ob_type, ob_size, f_code, f_valuestack;

     * f_locals, f_trace are NULL;

     * f_localsplus does not require re-allocation and
       the local variables in f_localsplus are NULL.

   2. We also maintain a separate free list of stack frames (just like
   floats are allocated in a special way -- see floatobject.c).  When
   a stack frame is on the free list, only the following members have
   a meaning:
       ob_type             == &PyFrame_Type
       f_back              next item on free list, or NULL
       f_stacksize         size of value stack
       ob_size             size of localsplus
   Note that the value and block stacks are preserved -- this can save
   another malloc() call or two (and two free() calls as well!).
   Also note that, unlike for integers, each frame object is a
   malloc'ed object in its own right -- it is only the actual calls to
   malloc() that we are trying to save here, not the administration.
   After all, while a typical program may make millions of calls, a
   call depth of more than 20 or 30 is probably already exceptional
   unless the program contains run-away recursion.  I hope.

   Later, PyFrame_MAXFREELIST was added to bound the # of frames saved on
   free_list.  Else programs creating lots of cyclic trash involving
   frames could provoke free_list into growing without bound.
*/
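
/* Illustrative sketch (not part of the implementation): frame_alloc() below
 * consults these caches in order, roughly
 *
 *     f = code->co_zombieframe;         // 1. reuse the code's zombie frame
 *     if (f == NULL)
 *         f = state->free_list;         // 2. else reuse a free-listed frame
 *     if (f == NULL)
 *         f = PyObject_GC_NewVar(...);  // 3. else allocate a new one
 *
 * and frame_dealloc() feeds frames back into the same two caches.
 */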
/* max value for numfree */
#define PyFrame_MAXFREELIST 200

static void _Py_HOT_FUNCTION
frame_dealloc(PyFrameObject *f)
{
    if (_PyObject_GC_IS_TRACKED(f)) {
        _PyObject_GC_UNTRACK(f);
    }

    Py_TRASHCAN_BEGIN(f, frame_dealloc);
    /* Kill all local variables */
    PyObject **valuestack = f->f_valuestack;
    for (PyObject **p = f->f_localsplus; p < valuestack; p++) {
        Py_CLEAR(*p);
    }

    /* Free stack */
    for (int i = 0; i < f->f_stackdepth; i++) {
        Py_XDECREF(f->f_valuestack[i]);
    }
    f->f_stackdepth = 0;

    Py_XDECREF(f->f_back);
    Py_DECREF(f->f_builtins);
    Py_DECREF(f->f_globals);
    Py_CLEAR(f->f_locals);
    Py_CLEAR(f->f_trace);

    PyCodeObject *co = f->f_code;
    if (co->co_zombieframe == NULL) {
        co->co_zombieframe = f;
    }
    else {
        struct _Py_frame_state *state = get_frame_state();
#ifdef Py_DEBUG
        // frame_dealloc() must not be called after _PyFrame_Fini()
        assert(state->numfree != -1);
#endif
        if (state->numfree < PyFrame_MAXFREELIST) {
            ++state->numfree;
            f->f_back = state->free_list;
            state->free_list = f;
        }
        else {
            PyObject_GC_Del(f);
        }
    }

    Py_DECREF(co);
    Py_TRASHCAN_END;
}

static inline Py_ssize_t
frame_nslots(PyFrameObject *frame)
{
    PyCodeObject *code = frame->f_code;
    return (code->co_nlocals
            + PyTuple_GET_SIZE(code->co_cellvars)
            + PyTuple_GET_SIZE(code->co_freevars));
}

static int
frame_traverse(PyFrameObject *f, visitproc visit, void *arg)
{
    Py_VISIT(f->f_back);
    Py_VISIT(f->f_code);
    Py_VISIT(f->f_builtins);
    Py_VISIT(f->f_globals);
    Py_VISIT(f->f_locals);
    Py_VISIT(f->f_trace);

    /* locals */
    PyObject **fastlocals = f->f_localsplus;
    for (Py_ssize_t i = frame_nslots(f); --i >= 0; ++fastlocals) {
        Py_VISIT(*fastlocals);
    }

    /* stack */
    for (int i = 0; i < f->f_stackdepth; i++) {
        Py_VISIT(f->f_valuestack[i]);
    }
    return 0;
}

static int
frame_tp_clear(PyFrameObject *f)
{
    /* Before anything else, make sure that this frame is clearly marked
     * as being defunct! Else, e.g., a generator reachable from this
     * frame may also point to this frame, believe itself to still be
     * active, and try cleaning up this frame again.
     */
    f->f_state = FRAME_CLEARED;

    Py_CLEAR(f->f_trace);

    /* locals */
    PyObject **fastlocals = f->f_localsplus;
    for (Py_ssize_t i = frame_nslots(f); --i >= 0; ++fastlocals) {
        Py_CLEAR(*fastlocals);
    }

    /* stack */
    for (int i = 0; i < f->f_stackdepth; i++) {
        Py_CLEAR(f->f_valuestack[i]);
    }
    f->f_stackdepth = 0;
    return 0;
}

static PyObject *
frame_clear(PyFrameObject *f, PyObject *Py_UNUSED(ignored))
{
    if (_PyFrame_IsExecuting(f)) {
        PyErr_SetString(PyExc_RuntimeError,
                        "cannot clear an executing frame");
        return NULL;
    }
    if (f->f_gen) {
        _PyGen_Finalize(f->f_gen);
        assert(f->f_gen == NULL);
    }
    (void)frame_tp_clear(f);
    Py_RETURN_NONE;
}

PyDoc_STRVAR(clear__doc__,
"F.clear(): clear most references held by the frame");

static PyObject *
frame_sizeof(PyFrameObject *f, PyObject *Py_UNUSED(ignored))
{
    Py_ssize_t res, extras, ncells, nfrees;

    PyCodeObject *code = f->f_code;
    ncells = PyTuple_GET_SIZE(code->co_cellvars);
    nfrees = PyTuple_GET_SIZE(code->co_freevars);
    extras = code->co_stacksize + code->co_nlocals + ncells + nfrees;
    /* subtract one as it is already included in PyFrameObject */
    res = sizeof(PyFrameObject) + (extras-1) * sizeof(PyObject *);

    return PyLong_FromSsize_t(res);
}

PyDoc_STRVAR(sizeof__doc__,
"F.__sizeof__() -> size of F in memory, in bytes");

static PyObject *
frame_repr(PyFrameObject *f)
{
    int lineno = PyFrame_GetLineNumber(f);
    PyCodeObject *code = f->f_code;
    return PyUnicode_FromFormat(
        "<frame at %p, file %R, line %d, code %S>",
        f, code->co_filename, lineno, code->co_name);
}

static PyMethodDef frame_methods[] = {
    {"clear",       (PyCFunction)frame_clear,   METH_NOARGS,
     clear__doc__},
    {"__sizeof__",  (PyCFunction)frame_sizeof,  METH_NOARGS,
     sizeof__doc__},
    {NULL, NULL}  /* sentinel */
};

PyTypeObject PyFrame_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "frame",
    sizeof(PyFrameObject),
    sizeof(PyObject *),
    (destructor)frame_dealloc,                  /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    (reprfunc)frame_repr,                       /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    PyObject_GenericSetAttr,                    /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    /* tp_flags */
    0,                                          /* tp_doc */
    (traverseproc)frame_traverse,               /* tp_traverse */
    (inquiry)frame_tp_clear,                    /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    0,                                          /* tp_iter */
    0,                                          /* tp_iternext */
    frame_methods,                              /* tp_methods */
    frame_memberlist,                           /* tp_members */
    frame_getsetlist,                           /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
};

_Py_IDENTIFIER(__builtins__);

static inline PyFrameObject*
frame_alloc(PyCodeObject *code)
{
    PyFrameObject *f = code->co_zombieframe;
    if (f != NULL) {
        code->co_zombieframe = NULL;
        _Py_NewReference((PyObject *)f);
        assert(f->f_code == code);
        return f;
    }

    Py_ssize_t ncells = PyTuple_GET_SIZE(code->co_cellvars);
    Py_ssize_t nfrees = PyTuple_GET_SIZE(code->co_freevars);
    Py_ssize_t extras = code->co_stacksize + code->co_nlocals + ncells + nfrees;
    struct _Py_frame_state *state = get_frame_state();
    if (state->free_list == NULL)
    {
        f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, extras);
        if (f == NULL) {
            return NULL;
        }
    }
    else {
#ifdef Py_DEBUG
        // frame_alloc() must not be called after _PyFrame_Fini()
        assert(state->numfree != -1);
#endif
        assert(state->numfree > 0);
        --state->numfree;
        f = state->free_list;
        state->free_list = state->free_list->f_back;
        if (Py_SIZE(f) < extras) {
            PyFrameObject *new_f = PyObject_GC_Resize(PyFrameObject, f, extras);
            if (new_f == NULL) {
                PyObject_GC_Del(f);
                return NULL;
            }
            f = new_f;
        }
        _Py_NewReference((PyObject *)f);
    }

    extras = code->co_nlocals + ncells + nfrees;
    f->f_valuestack = f->f_localsplus + extras;
    for (Py_ssize_t i=0; i < extras; i++) {
        f->f_localsplus[i] = NULL;
    }
    return f;
}


PyFrameObject* _Py_HOT_FUNCTION
_PyFrame_New_NoTrack(PyThreadState *tstate, PyFrameConstructor *con, PyObject *locals)
{
    assert(con != NULL);
    assert(con->fc_globals != NULL);
    assert(con->fc_builtins != NULL);
    assert(con->fc_code != NULL);
    assert(locals == NULL || PyMapping_Check(locals));

    PyFrameObject *f = frame_alloc((PyCodeObject *)con->fc_code);
    if (f == NULL) {
        return NULL;
    }

    f->f_back = (PyFrameObject*)Py_XNewRef(tstate->frame);
    f->f_code = (PyCodeObject *)Py_NewRef(con->fc_code);
    f->f_builtins = Py_NewRef(con->fc_builtins);
    f->f_globals = Py_NewRef(con->fc_globals);
    f->f_locals = Py_XNewRef(locals);
    // f_valuestack initialized by frame_alloc()
    f->f_trace = NULL;
    f->f_stackdepth = 0;
    f->f_trace_lines = 1;
    f->f_trace_opcodes = 0;
    f->f_gen = NULL;
    f->f_lasti = -1;
    f->f_lineno = 0;
    f->f_iblock = 0;
    f->f_state = FRAME_CREATED;
    // f_blockstack and f_localsplus initialized by frame_alloc()
    return f;
}

/* Legacy API */
PyFrameObject*
PyFrame_New(PyThreadState *tstate, PyCodeObject *code,
            PyObject *globals, PyObject *locals)
{
    PyObject *builtins = _PyEval_BuiltinsFromGlobals(tstate, globals);  // borrowed ref
    if (builtins == NULL) {
        return NULL;
    }
    PyFrameConstructor desc = {
        .fc_globals = globals,
        .fc_builtins = builtins,
        .fc_name = code->co_name,
        .fc_qualname = code->co_name,
        .fc_code = (PyObject *)code,
        .fc_defaults = NULL,
        .fc_kwdefaults = NULL,
        .fc_closure = NULL
    };
    PyFrameObject *f = _PyFrame_New_NoTrack(tstate, &desc, locals);
    if (f) {
        _PyObject_GC_TRACK(f);
    }
    return f;
}
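
/* Illustrative sketch (not part of the implementation): extension code that
 * needs a frame purely for reporting purposes (e.g. to synthesise a traceback
 * entry) can use the legacy constructor.  `code` and `globals` stand for
 * objects the caller already owns:
 *
 *     PyThreadState *tstate = PyThreadState_Get();
 *     PyFrameObject *f = PyFrame_New(tstate, code, globals, NULL);
 *     if (f == NULL) {
 *         return NULL;
 *     }
 *     // ... use f ...
 *     Py_DECREF(f);
 */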


/* Block management */

void
PyFrame_BlockSetup(PyFrameObject *f, int type, int handler, int level)
{
    PyTryBlock *b;
    if (f->f_iblock >= CO_MAXBLOCKS) {
        Py_FatalError("block stack overflow");
    }
    b = &f->f_blockstack[f->f_iblock++];
    b->b_type = type;
    b->b_level = level;
    b->b_handler = handler;
}

PyTryBlock *
PyFrame_BlockPop(PyFrameObject *f)
{
    PyTryBlock *b;
    if (f->f_iblock <= 0) {
        Py_FatalError("block stack underflow");
    }
    b = &f->f_blockstack[--f->f_iblock];
    return b;
}

/* Convert between "fast" version of locals and dictionary version.

   map and values are input arguments.  map is a tuple of strings.
   values is an array of PyObject*.  At index i, map[i] is the name of
   the variable with value values[i].  The function copies the first
   nmap variables from map/values into dict.  If values[i] is NULL,
   the variable is deleted from dict.

   If deref is true, then the values being copied are cell variables
   and the value is extracted from the cell variable before being put
   in dict.
 */
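
/* Illustrative sketch (not part of the implementation): with
 * map == ("x", "y") and values == {<some object>, NULL}, a call
 *
 *     map_to_dict(map, 2, dict, values, 0);
 *
 * sets dict["x"] to the object and removes "y" from dict (a missing "y"
 * is not treated as an error).
 */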

static int
map_to_dict(PyObject *map, Py_ssize_t nmap, PyObject *dict, PyObject **values,
            int deref)
{
    Py_ssize_t j;
    assert(PyTuple_Check(map));
    assert(PyDict_Check(dict));
    assert(PyTuple_Size(map) >= nmap);
    for (j=0; j < nmap; j++) {
        PyObject *key = PyTuple_GET_ITEM(map, j);
        PyObject *value = values[j];
        assert(PyUnicode_Check(key));
        if (deref && value != NULL) {
            assert(PyCell_Check(value));
            value = PyCell_GET(value);
        }
        if (value == NULL) {
            if (PyObject_DelItem(dict, key) != 0) {
                if (PyErr_ExceptionMatches(PyExc_KeyError))
                    PyErr_Clear();
                else
                    return -1;
            }
        }
        else {
            if (PyObject_SetItem(dict, key, value) != 0)
                return -1;
        }
    }
    return 0;
}

/* Copy values from the "locals" dict into the fast locals.

   dict is an input argument containing string keys representing
   variable names and arbitrary PyObject* as values.

   map and values are input arguments.  map is a tuple of strings.
   values is an array of PyObject*.  At index i, map[i] is the name of
   the variable whose value is stored in values[i].  The function
   copies the values for the first nmap names in map from dict into
   values.

   If deref is true, then the entries in values are cell variables and
   the value taken from dict is stored into the cell rather than into
   the array slot.  If clear is true, then variables in map but not in
   dict are set to NULL in values; if clear is false, variables missing
   in dict are ignored.

   Exceptions raised while reading or writing the values are silently
   ignored, because there is no good way to report them.
 */

static void
dict_to_map(PyObject *map, Py_ssize_t nmap, PyObject *dict, PyObject **values,
            int deref, int clear)
{
    Py_ssize_t j;
    assert(PyTuple_Check(map));
    assert(PyDict_Check(dict));
    assert(PyTuple_Size(map) >= nmap);
    for (j=0; j < nmap; j++) {
        PyObject *key = PyTuple_GET_ITEM(map, j);
        PyObject *value = PyObject_GetItem(dict, key);
        assert(PyUnicode_Check(key));
        /* We only care about NULLs if clear is true. */
        if (value == NULL) {
            PyErr_Clear();
            if (!clear)
                continue;
        }
        if (deref) {
            assert(PyCell_Check(values[j]));
            if (PyCell_GET(values[j]) != value) {
                if (PyCell_Set(values[j], value) < 0)
                    PyErr_Clear();
            }
        } else if (values[j] != value) {
            Py_XINCREF(value);
            Py_XSETREF(values[j], value);
        }
        Py_XDECREF(value);
    }
}

int
PyFrame_FastToLocalsWithError(PyFrameObject *f)
{
    /* Merge fast locals into f->f_locals */
    PyObject *locals, *map;
    PyObject **fast;
    PyCodeObject *co;
    Py_ssize_t j;
    Py_ssize_t ncells, nfreevars;

    if (f == NULL) {
        PyErr_BadInternalCall();
        return -1;
    }
    locals = f->f_locals;
    if (locals == NULL) {
        locals = f->f_locals = PyDict_New();
        if (locals == NULL)
            return -1;
    }
    co = f->f_code;
    map = co->co_varnames;
    if (!PyTuple_Check(map)) {
        PyErr_Format(PyExc_SystemError,
                     "co_varnames must be a tuple, not %s",
                     Py_TYPE(map)->tp_name);
        return -1;
    }
    fast = f->f_localsplus;
    j = PyTuple_GET_SIZE(map);
    if (j > co->co_nlocals)
        j = co->co_nlocals;
    if (co->co_nlocals) {
        if (map_to_dict(map, j, locals, fast, 0) < 0)
            return -1;
    }
    ncells = PyTuple_GET_SIZE(co->co_cellvars);
    nfreevars = PyTuple_GET_SIZE(co->co_freevars);
    if (ncells || nfreevars) {
        if (map_to_dict(co->co_cellvars, ncells,
                        locals, fast + co->co_nlocals, 1))
            return -1;

        /* If the namespace is unoptimized, then one of the
           following cases applies:
           1. It does not contain free variables, because it
              uses import * or is a top-level namespace.
           2. It is a class namespace.
           We don't want to accidentally copy free variables
           into the locals dict used by the class.
        */
        if (co->co_flags & CO_OPTIMIZED) {
            if (map_to_dict(co->co_freevars, nfreevars,
                            locals, fast + co->co_nlocals + ncells, 1) < 0)
                return -1;
        }
    }
    return 0;
}

void
PyFrame_FastToLocals(PyFrameObject *f)
{
    int res;

    assert(!PyErr_Occurred());

    res = PyFrame_FastToLocalsWithError(f);
    if (res < 0)
        PyErr_Clear();
}

void
PyFrame_LocalsToFast(PyFrameObject *f, int clear)
{
    /* Merge f->f_locals into fast locals */
    PyObject *locals, *map;
    PyObject **fast;
    PyObject *error_type, *error_value, *error_traceback;
    PyCodeObject *co;
    Py_ssize_t j;
    Py_ssize_t ncells, nfreevars;
    if (f == NULL)
        return;
    locals = f->f_locals;
    co = f->f_code;
    map = co->co_varnames;
    if (locals == NULL)
        return;
    if (!PyTuple_Check(map))
        return;
    PyErr_Fetch(&error_type, &error_value, &error_traceback);
    fast = f->f_localsplus;
    j = PyTuple_GET_SIZE(map);
    if (j > co->co_nlocals)
        j = co->co_nlocals;
    if (co->co_nlocals)
        dict_to_map(co->co_varnames, j, locals, fast, 0, clear);
    ncells = PyTuple_GET_SIZE(co->co_cellvars);
    nfreevars = PyTuple_GET_SIZE(co->co_freevars);
    if (ncells || nfreevars) {
        dict_to_map(co->co_cellvars, ncells,
                    locals, fast + co->co_nlocals, 1, clear);
        /* Same test as in PyFrame_FastToLocals() above. */
        if (co->co_flags & CO_OPTIMIZED) {
            dict_to_map(co->co_freevars, nfreevars,
                        locals, fast + co->co_nlocals + ncells, 1,
                        clear);
        }
    }
    PyErr_Restore(error_type, error_value, error_traceback);
}
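
/* Illustrative sketch (not part of the implementation): C code that mutates
 * a frame's locals typically brackets the mutation with these two calls,
 * roughly
 *
 *     if (PyFrame_FastToLocalsWithError(f) < 0) {
 *         return NULL;
 *     }
 *     // ... modify f->f_locals (a dict) ...
 *     PyFrame_LocalsToFast(f, 0);
 *
 * so that the changes are written back into the fast-locals array.
 */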

/* Clear out the free list */
void
_PyFrame_ClearFreeList(PyInterpreterState *interp)
{
    struct _Py_frame_state *state = &interp->frame;
    while (state->free_list != NULL) {
        PyFrameObject *f = state->free_list;
        state->free_list = state->free_list->f_back;
        PyObject_GC_Del(f);
        --state->numfree;
    }
    assert(state->numfree == 0);
}

void
_PyFrame_Fini(PyInterpreterState *interp)
{
    _PyFrame_ClearFreeList(interp);
#ifdef Py_DEBUG
    struct _Py_frame_state *state = &interp->frame;
    state->numfree = -1;
#endif
}

/* Print summary info about the state of the optimized allocator */
void
_PyFrame_DebugMallocStats(FILE *out)
{
    struct _Py_frame_state *state = get_frame_state();
    _PyDebugAllocatorStats(out,
                           "free PyFrameObject",
                           state->numfree, sizeof(PyFrameObject));
}


PyCodeObject *
PyFrame_GetCode(PyFrameObject *frame)
{
    assert(frame != NULL);
    PyCodeObject *code = frame->f_code;
    assert(code != NULL);
    Py_INCREF(code);
    return code;
}
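
/* Illustrative sketch (not part of the implementation): the return value is
 * a strong reference, so callers own it:
 *
 *     PyCodeObject *code = PyFrame_GetCode(frame);
 *     // ... inspect code->co_name, code->co_filename, ...
 *     Py_DECREF(code);
 */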


PyFrameObject*
PyFrame_GetBack(PyFrameObject *frame)
{
    assert(frame != NULL);
    PyFrameObject *back = frame->f_back;
    Py_XINCREF(back);
    return back;
}
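
/* Illustrative sketch (not part of the implementation): walking the call
 * stack with the accessors, releasing each reference as we go:
 *
 *     PyFrameObject *f = PyThreadState_GetFrame(tstate);  // strong ref or NULL
 *     while (f != NULL) {
 *         PyFrameObject *back = PyFrame_GetBack(f);       // strong ref or NULL
 *         Py_DECREF(f);
 *         f = back;
 *     }
 */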

PyObject*
_PyEval_BuiltinsFromGlobals(PyThreadState *tstate, PyObject *globals)
{
    PyObject *builtins = _PyDict_GetItemIdWithError(globals, &PyId___builtins__);
    if (builtins) {
        if (PyModule_Check(builtins)) {
            builtins = _PyModule_GetDict(builtins);
            assert(builtins != NULL);
        }
        return builtins;
    }
    if (PyErr_Occurred()) {
        return NULL;
    }

    return _PyEval_GetBuiltins(tstate);
}