/* Frame object implementation */

#include "Python.h"
#include "pycore_object.h"
#include "pycore_gc.h"       // _PyObject_GC_IS_TRACKED()

#include "code.h"
#include "frameobject.h"
#include "opcode.h"
#include "structmember.h"    // PyMemberDef

#define OFF(x) offsetof(PyFrameObject, x)

static PyMemberDef frame_memberlist[] = {
    {"f_back",          T_OBJECT,   OFF(f_back),          READONLY},
    {"f_code",          T_OBJECT,   OFF(f_code),          READONLY},
    {"f_builtins",      T_OBJECT,   OFF(f_builtins),      READONLY},
    {"f_globals",       T_OBJECT,   OFF(f_globals),       READONLY},
    {"f_lasti",         T_INT,      OFF(f_lasti),         READONLY},
    {"f_trace_lines",   T_BOOL,     OFF(f_trace_lines),   0},
    {"f_trace_opcodes", T_BOOL,     OFF(f_trace_opcodes), 0},
    {NULL}      /* Sentinel */
};

static PyObject *
frame_getlocals(PyFrameObject *f, void *closure)
{
    if (PyFrame_FastToLocalsWithError(f) < 0)
        return NULL;
    Py_INCREF(f->f_locals);
    return f->f_locals;
}

int
PyFrame_GetLineNumber(PyFrameObject *f)
{
    assert(f != NULL);
    if (f->f_trace) {
        return f->f_lineno;
    }
    else {
        return PyCode_Addr2Line(f->f_code, f->f_lasti);
    }
}

static PyObject *
frame_getlineno(PyFrameObject *f, void *closure)
{
    return PyLong_FromLong(PyFrame_GetLineNumber(f));
}


/* Given the index of the effective opcode,
   scan back to construct the oparg with EXTENDED_ARG */
static unsigned int
get_arg(const _Py_CODEUNIT *codestr, Py_ssize_t i)
{
    _Py_CODEUNIT word;
    unsigned int oparg = _Py_OPARG(codestr[i]);
    if (i >= 1 && _Py_OPCODE(word = codestr[i-1]) == EXTENDED_ARG) {
        oparg |= _Py_OPARG(word) << 8;
        if (i >= 2 && _Py_OPCODE(word = codestr[i-2]) == EXTENDED_ARG) {
            oparg |= _Py_OPARG(word) << 16;
            if (i >= 3 && _Py_OPCODE(word = codestr[i-3]) == EXTENDED_ARG) {
                oparg |= _Py_OPARG(word) << 24;
            }
        }
    }
    return oparg;
}
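/* Illustrative example: an instruction whose argument needs more than one
 * byte is emitted with an EXTENDED_ARG prefix, e.g.
 *
 *     EXTENDED_ARG  0x01
 *     JUMP_ABSOLUTE 0x02
 *
 * and get_arg() called at the index of the JUMP_ABSOLUTE word reconstructs
 * the full oparg as (0x01 << 8) | 0x02 == 0x0102.
 */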

typedef enum kind {
    With = 1,
    Loop = 2,
    Try = 3,
    Except = 4,
} Kind;

#define BITS_PER_BLOCK 3

static inline int64_t
push_block(int64_t stack, Kind kind)
{
    assert(stack < ((int64_t)1)<<(BITS_PER_BLOCK*CO_MAXBLOCKS));
    return (stack << BITS_PER_BLOCK) | kind;
}

static inline int64_t
pop_block(int64_t stack)
{
    assert(stack > 0);
    return stack >> BITS_PER_BLOCK;
}

static inline Kind
top_block(int64_t stack)
{
    return stack & ((1<<BITS_PER_BLOCK)-1);
}

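/* The block stack is packed into a single int64_t, BITS_PER_BLOCK (3) bits
 * per entry, innermost block in the low bits.  For example, entering a 'try'
 * and then a 'with' gives
 *
 *     push_block(push_block(0, Try), With) == (3 << 3) | 1 == 25
 *
 * so that top_block(25) == With and pop_block(25) == 3 (just the Try).
 *
 * markblocks() below computes such a stack for every instruction of the code
 * object: blocks[i] is the packed block stack on entry to instruction i, or
 * -1 if instruction i was never reached by the scan.  The scan is repeated
 * until it reaches a fixed point, because the target of a backward jump may
 * only be discovered to be reachable on a later pass (the 'todo' flag).
 */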
static int64_t *
markblocks(PyCodeObject *code_obj, int len)
{
    const _Py_CODEUNIT *code =
        (const _Py_CODEUNIT *)PyBytes_AS_STRING(code_obj->co_code);
    int64_t *blocks = PyMem_New(int64_t, len+1);
    int i, j, opcode;

    if (blocks == NULL) {
        PyErr_NoMemory();
        return NULL;
    }
    memset(blocks, -1, (len+1)*sizeof(int64_t));
    blocks[0] = 0;
    int todo = 1;
    while (todo) {
        todo = 0;
        for (i = 0; i < len; i++) {
            int64_t block_stack = blocks[i];
            int64_t except_stack;
            if (block_stack == -1) {
                continue;
            }
            opcode = _Py_OPCODE(code[i]);
            switch (opcode) {
                case JUMP_IF_FALSE_OR_POP:
                case JUMP_IF_TRUE_OR_POP:
                case POP_JUMP_IF_FALSE:
                case POP_JUMP_IF_TRUE:
                case JUMP_IF_NOT_EXC_MATCH:
                    j = get_arg(code, i) / sizeof(_Py_CODEUNIT);
                    assert(j < len);
                    if (blocks[j] == -1 && j < i) {
                        todo = 1;
                    }
                    assert(blocks[j] == -1 || blocks[j] == block_stack);
                    blocks[j] = block_stack;
                    blocks[i+1] = block_stack;
                    break;
                case JUMP_ABSOLUTE:
                    j = get_arg(code, i) / sizeof(_Py_CODEUNIT);
                    assert(j < len);
                    if (blocks[j] == -1 && j < i) {
                        todo = 1;
                    }
                    assert(blocks[j] == -1 || blocks[j] == block_stack);
                    blocks[j] = block_stack;
                    break;
                case SETUP_FINALLY:
                    j = get_arg(code, i) / sizeof(_Py_CODEUNIT) + i + 1;
                    assert(j < len);
                    except_stack = push_block(block_stack, Except);
                    assert(blocks[j] == -1 || blocks[j] == except_stack);
                    blocks[j] = except_stack;
                    block_stack = push_block(block_stack, Try);
                    blocks[i+1] = block_stack;
                    break;
                case SETUP_WITH:
                case SETUP_ASYNC_WITH:
                    j = get_arg(code, i) / sizeof(_Py_CODEUNIT) + i + 1;
                    assert(j < len);
                    except_stack = push_block(block_stack, Except);
                    assert(blocks[j] == -1 || blocks[j] == except_stack);
                    blocks[j] = except_stack;
                    block_stack = push_block(block_stack, With);
                    blocks[i+1] = block_stack;
                    break;
                case JUMP_FORWARD:
                    j = get_arg(code, i) / sizeof(_Py_CODEUNIT) + i + 1;
                    assert(j < len);
                    assert(blocks[j] == -1 || blocks[j] == block_stack);
                    blocks[j] = block_stack;
                    break;
                case GET_ITER:
                case GET_AITER:
                    block_stack = push_block(block_stack, Loop);
                    blocks[i+1] = block_stack;
                    break;
                case FOR_ITER:
                    blocks[i+1] = block_stack;
                    block_stack = pop_block(block_stack);
                    j = get_arg(code, i) / sizeof(_Py_CODEUNIT) + i + 1;
                    assert(j < len);
                    assert(blocks[j] == -1 || blocks[j] == block_stack);
                    blocks[j] = block_stack;
                    break;
                case POP_BLOCK:
                case POP_EXCEPT:
                    block_stack = pop_block(block_stack);
                    blocks[i+1] = block_stack;
                    break;
                case END_ASYNC_FOR:
                    block_stack = pop_block(pop_block(block_stack));
                    blocks[i+1] = block_stack;
                    break;
                case RETURN_VALUE:
                case RAISE_VARARGS:
                case RERAISE:
                    /* End of block */
                    break;
                default:
                    blocks[i+1] = block_stack;

            }
        }
    }
    return blocks;
}

static int
compatible_block_stack(int64_t from_stack, int64_t to_stack)
{
    if (to_stack < 0) {
        return 0;
    }
    while(from_stack > to_stack) {
        from_stack = pop_block(from_stack);
    }
    return from_stack == to_stack;
}
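/* Note: the comparison above relies on the packed encoding.  Popping blocks
 * only ever shrinks the integer, so a jump is compatible exactly when the
 * target's block stack is a prefix of the current one.  For instance, with
 * the example above, jumping from inside the 'with' inside the 'try'
 * (stack 25) out to the bare 'try' (stack 3) is allowed, while jumping the
 * other way is not.
 */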

static const char *
explain_incompatible_block_stack(int64_t to_stack)
{
    Kind target_kind = top_block(to_stack);
    switch(target_kind) {
        case Except:
            return "can't jump into an 'except' block as there's no exception";
        case Try:
            return "can't jump into the body of a try statement";
        case With:
            return "can't jump into the body of a with statement";
        case Loop:
            return "can't jump into the body of a for loop";
        default:
            Py_UNREACHABLE();
    }
}

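/* marklines() returns an array of length len where entry i is the source
 * line number if a new line starts at instruction i, and -1 otherwise.
 * It walks co_lnotab, which stores (bytecode-offset delta, line delta)
 * byte pairs; the offset delta is unsigned while the line delta is a
 * signed char, hence the cast below for lines that decrease.
 */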
static int *
marklines(PyCodeObject *code, int len)
{
    int *linestarts = PyMem_New(int, len);
    if (linestarts == NULL) {
        return NULL;
    }
    Py_ssize_t size = PyBytes_GET_SIZE(code->co_lnotab) / 2;
    unsigned char *p = (unsigned char*)PyBytes_AS_STRING(code->co_lnotab);
    int line = code->co_firstlineno;
    int addr = 0;
    int index = 0;
    while (--size >= 0) {
        addr += *p++;
        if (index*2 < addr) {
            linestarts[index++] = line;
        }
        while (index*2 < addr) {
            linestarts[index++] = -1;
            if (index >= len) {
                break;
            }
        }
        line += (signed char)*p;
        p++;
    }
    if (index < len) {
        linestarts[index++] = line;
    }
    while (index < len) {
        linestarts[index++] = -1;
    }
    assert(index == len);
    return linestarts;
}

static int
first_line_not_before(int *lines, int len, int line)
{
    int result = INT_MAX;
    for (int i = 0; i < len; i++) {
        if (lines[i] < result && lines[i] >= line) {
            result = lines[i];
        }
    }
    if (result == INT_MAX) {
        return -1;
    }
    return result;
}

static void
frame_stack_pop(PyFrameObject *f)
{
    PyObject *v = (*--f->f_stacktop);
    Py_DECREF(v);
}

static void
frame_block_unwind(PyFrameObject *f)
{
    assert(f->f_iblock > 0);
    f->f_iblock--;
    PyTryBlock *b = &f->f_blockstack[f->f_iblock];
    intptr_t delta = (f->f_stacktop - f->f_valuestack) - b->b_level;
    while (delta > 0) {
        frame_stack_pop(f);
        delta--;
    }
}


/* Setter for f_lineno - you can set f_lineno from within a trace function in
 * order to jump to a given line of code, subject to some restrictions.  Most
 * lines are OK to jump to because they don't make any assumptions about the
 * state of the stack (obvious because you could remove the line and the code
 * would still work without any stack errors), but there are some constructs
 * that limit jumping:
 *
 *  o Lines with an 'except' statement on them can't be jumped to, because
 *    they expect an exception to be on the top of the stack.
 *  o Lines that live in a 'finally' block can't be jumped from or to, since
 *    we cannot be sure which state the interpreter was in or would be in
 *    during execution of the finally block.
 *  o 'try', 'with' and 'async with' blocks can't be jumped into because
 *    the blockstack needs to be set up before their code runs.
 *  o 'for' and 'async for' loops can't be jumped into because the
 *    iterator needs to be on the stack.
 *  o Jumps cannot be made from within a trace function invoked with a
 *    'return' or 'exception' event since the eval loop has been exited at
 *    that time.
 */
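/* For illustration, a sketch of how such a jump is requested from Python:
 * assign to f_lineno inside a 'line' trace event, roughly
 *
 *     import sys
 *
 *     def trace(frame, event, arg):
 *         if event == 'line' and frame.f_lineno == START_LINE:
 *             frame.f_lineno = TARGET_LINE    # invokes frame_setlineno()
 *         return trace
 *
 *     sys.settrace(trace)
 *
 * where START_LINE and TARGET_LINE are placeholder line numbers chosen by
 * the caller; the assignment raises ValueError if the jump is forbidden by
 * the rules above.
 */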
static int
frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignored))
{
    if (p_new_lineno == NULL) {
        PyErr_SetString(PyExc_AttributeError, "cannot delete attribute");
        return -1;
    }
    /* f_lineno must be an integer. */
    if (!PyLong_CheckExact(p_new_lineno)) {
        PyErr_SetString(PyExc_ValueError,
                        "lineno must be an integer");
        return -1;
    }

    /* Upon the 'call' trace event of a new frame, f->f_lasti is -1 and
     * f->f_trace is NULL, so check the f_lasti condition first.
     * Forbidding jumps from the 'call' event of a new frame is a side effect
     * of allowing to set f_lineno only from trace functions. */
    if (f->f_lasti == -1) {
        PyErr_Format(PyExc_ValueError,
                     "can't jump from the 'call' trace event of a new frame");
        return -1;
    }

    /* You can only do this from within a trace function, not via
     * _getframe or similar hackery. */
    if (!f->f_trace) {
        PyErr_Format(PyExc_ValueError,
                     "f_lineno can only be set by a trace function");
        return -1;
    }

    /* Forbid jumps upon a 'return' trace event (except after executing a
     * YIELD_VALUE or YIELD_FROM opcode, f_stacktop is not NULL in that case)
     * and upon an 'exception' trace event.
     * Jumps from 'call' trace events have already been forbidden above for new
     * frames, so this check does not change anything for 'call' events. */
    if (f->f_stacktop == NULL) {
        PyErr_SetString(PyExc_ValueError,
                        "can only jump from a 'line' trace event");
        return -1;
    }

    int new_lineno;

    /* Fail if the line falls outside the code block and
       select first line with actual code. */
    int overflow;
    long l_new_lineno = PyLong_AsLongAndOverflow(p_new_lineno, &overflow);
    if (overflow
#if SIZEOF_LONG > SIZEOF_INT
        || l_new_lineno > INT_MAX
        || l_new_lineno < INT_MIN
#endif
    ) {
        PyErr_SetString(PyExc_ValueError,
                        "lineno out of range");
        return -1;
    }
    new_lineno = (int)l_new_lineno;

    if (new_lineno < f->f_code->co_firstlineno) {
        PyErr_Format(PyExc_ValueError,
                     "line %d comes before the current code block",
                     new_lineno);
        return -1;
    }

    int len = PyBytes_GET_SIZE(f->f_code->co_code)/sizeof(_Py_CODEUNIT);
    int *lines = marklines(f->f_code, len);
    if (lines == NULL) {
        return -1;
    }

    new_lineno = first_line_not_before(lines, len, new_lineno);
    if (new_lineno < 0) {
        PyErr_Format(PyExc_ValueError,
                     "line %d comes after the current code block",
                     (int)l_new_lineno);
        PyMem_Free(lines);
        return -1;
    }

    int64_t *blocks = markblocks(f->f_code, len);
    if (blocks == NULL) {
        PyMem_Free(lines);
        return -1;
    }

    int64_t target_block_stack = -1;
    int64_t best_block_stack = -1;
    int best_addr = -1;
    int64_t start_block_stack = blocks[f->f_lasti/sizeof(_Py_CODEUNIT)];
    const char *msg = "cannot find bytecode for specified line";
    for (int i = 0; i < len; i++) {
        if (lines[i] == new_lineno) {
            target_block_stack = blocks[i];
            if (compatible_block_stack(start_block_stack, target_block_stack)) {
                msg = NULL;
                if (target_block_stack > best_block_stack) {
                    best_block_stack = target_block_stack;
                    best_addr = i*sizeof(_Py_CODEUNIT);
                }
            }
            else if (msg) {
                if (target_block_stack >= 0) {
                    msg = explain_incompatible_block_stack(target_block_stack);
                }
                else {
                    msg = "code may be unreachable.";
                }
            }
        }
    }
    PyMem_Free(blocks);
    PyMem_Free(lines);
    if (msg != NULL) {
        PyErr_SetString(PyExc_ValueError, msg);
        return -1;
    }

    /* Unwind block stack. */
    while (start_block_stack > best_block_stack) {
        Kind kind = top_block(start_block_stack);
        switch(kind) {
            case Loop:
                frame_stack_pop(f);
                break;
            case Try:
                frame_block_unwind(f);
                break;
            case With:
                frame_block_unwind(f);
                // Pop the exit function
                frame_stack_pop(f);
                break;
            case Except:
                PyErr_SetString(PyExc_ValueError,
                    "can't jump out of an 'except' block");
                return -1;
        }
        start_block_stack = pop_block(start_block_stack);
    }

    /* Finally set the new f_lineno and f_lasti and return OK. */
    f->f_lineno = new_lineno;
    f->f_lasti = best_addr;
    return 0;
}

static PyObject *
frame_gettrace(PyFrameObject *f, void *closure)
{
    PyObject* trace = f->f_trace;

    if (trace == NULL)
        trace = Py_None;

    Py_INCREF(trace);

    return trace;
}

static int
frame_settrace(PyFrameObject *f, PyObject* v, void *closure)
{
    /* We rely on f_lineno being accurate when f_trace is set. */
    f->f_lineno = PyFrame_GetLineNumber(f);

    if (v == Py_None)
        v = NULL;
    Py_XINCREF(v);
    Py_XSETREF(f->f_trace, v);

    return 0;
}


static PyGetSetDef frame_getsetlist[] = {
    {"f_locals",        (getter)frame_getlocals, NULL, NULL},
    {"f_lineno",        (getter)frame_getlineno,
                        (setter)frame_setlineno, NULL},
    {"f_trace",         (getter)frame_gettrace, (setter)frame_settrace, NULL},
    {0}
};

/* Stack frames are allocated and deallocated at a considerable rate.
   In an attempt to improve the speed of function calls, we:

   1. Hold a single "zombie" frame on each code object. This retains
   the allocated and initialised frame object from an invocation of
   the code object. The zombie is reanimated the next time we need a
   frame object for that code object. Doing this saves the malloc/
   realloc required when using a free_list frame that isn't the
   correct size. It also saves some field initialisation.

   In zombie mode, no field of PyFrameObject holds a reference, but
   the following fields are still valid:

     * ob_type, ob_size, f_code, f_valuestack;

     * f_locals, f_trace are NULL;

     * f_localsplus does not require re-allocation and
       the local variables in f_localsplus are NULL.

   2. We also maintain a separate free list of stack frames (just like
   floats are allocated in a special way -- see floatobject.c).  When
   a stack frame is on the free list, only the following members have
   a meaning:
       ob_type             == &PyFrame_Type
       f_back              next item on free list, or NULL
       f_stacksize         size of value stack
       ob_size             size of localsplus
   Note that the value and block stacks are preserved -- this can save
   another malloc() call or two (and two free() calls as well!).
   Also note that, unlike for integers, each frame object is a
   malloc'ed object in its own right -- it is only the actual calls to
   malloc() that we are trying to save here, not the administration.
   After all, while a typical program may make millions of calls, a
   call depth of more than 20 or 30 is probably already exceptional
   unless the program contains run-away recursion.  I hope.

   Later, PyFrame_MAXFREELIST was added to bound the # of frames saved on
   free_list.  Else programs creating lots of cyclic trash involving
   frames could provoke free_list into growing without bound.
*/
/* max value for numfree */
#define PyFrame_MAXFREELIST 200

#if PyFrame_MAXFREELIST > 0
static PyFrameObject *free_list = NULL;
static int numfree = 0;         /* number of frames currently in free_list */
#endif
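/* frame_alloc() below tries these sources in order: the code object's
 * zombie frame, then the free list (resizing the recycled frame if its
 * localsplus area is too small), and only then a fresh GC allocation.
 */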

static void _Py_HOT_FUNCTION
frame_dealloc(PyFrameObject *f)
{
    PyObject **p, **valuestack;
    PyCodeObject *co;

    if (_PyObject_GC_IS_TRACKED(f))
        _PyObject_GC_UNTRACK(f);

    Py_TRASHCAN_SAFE_BEGIN(f)
    /* Kill all local variables */
    valuestack = f->f_valuestack;
    for (p = f->f_localsplus; p < valuestack; p++)
        Py_CLEAR(*p);

    /* Free stack */
    if (f->f_stacktop != NULL) {
        for (p = valuestack; p < f->f_stacktop; p++)
            Py_XDECREF(*p);
    }

    Py_XDECREF(f->f_back);
    Py_DECREF(f->f_builtins);
    Py_DECREF(f->f_globals);
    Py_CLEAR(f->f_locals);
    Py_CLEAR(f->f_trace);

    co = f->f_code;
    if (co->co_zombieframe == NULL) {
        co->co_zombieframe = f;
    }
#if PyFrame_MAXFREELIST > 0
    else if (numfree < PyFrame_MAXFREELIST) {
        ++numfree;
        f->f_back = free_list;
        free_list = f;
    }
#endif
    else {
        PyObject_GC_Del(f);
    }

    Py_DECREF(co);
    Py_TRASHCAN_SAFE_END(f)
}

static inline Py_ssize_t
frame_nslots(PyFrameObject *frame)
{
    PyCodeObject *code = frame->f_code;
    return (code->co_nlocals
            + PyTuple_GET_SIZE(code->co_cellvars)
            + PyTuple_GET_SIZE(code->co_freevars));
}

static int
frame_traverse(PyFrameObject *f, visitproc visit, void *arg)
{
    Py_VISIT(f->f_back);
    Py_VISIT(f->f_code);
    Py_VISIT(f->f_builtins);
    Py_VISIT(f->f_globals);
    Py_VISIT(f->f_locals);
    Py_VISIT(f->f_trace);

    /* locals */
    PyObject **fastlocals = f->f_localsplus;
    for (Py_ssize_t i = frame_nslots(f); --i >= 0; ++fastlocals) {
        Py_VISIT(*fastlocals);
    }

    /* stack */
    if (f->f_stacktop != NULL) {
        for (PyObject **p = f->f_valuestack; p < f->f_stacktop; p++) {
            Py_VISIT(*p);
        }
    }
    return 0;
}

static int
frame_tp_clear(PyFrameObject *f)
{
    /* Before anything else, make sure that this frame is clearly marked
     * as being defunct! Else, e.g., a generator reachable from this
     * frame may also point to this frame, believe itself to still be
     * active, and try cleaning up this frame again.
     */
    PyObject **oldtop = f->f_stacktop;
    f->f_stacktop = NULL;
    f->f_executing = 0;

    Py_CLEAR(f->f_trace);

    /* locals */
    PyObject **fastlocals = f->f_localsplus;
    for (Py_ssize_t i = frame_nslots(f); --i >= 0; ++fastlocals) {
        Py_CLEAR(*fastlocals);
    }

    /* stack */
    if (oldtop != NULL) {
        for (PyObject **p = f->f_valuestack; p < oldtop; p++) {
            Py_CLEAR(*p);
        }
    }
    return 0;
}

static PyObject *
frame_clear(PyFrameObject *f, PyObject *Py_UNUSED(ignored))
{
    if (f->f_executing) {
        PyErr_SetString(PyExc_RuntimeError,
                        "cannot clear an executing frame");
        return NULL;
    }
    if (f->f_gen) {
        _PyGen_Finalize(f->f_gen);
        assert(f->f_gen == NULL);
    }
    (void)frame_tp_clear(f);
    Py_RETURN_NONE;
}

PyDoc_STRVAR(clear__doc__,
"F.clear(): clear most references held by the frame");

static PyObject *
frame_sizeof(PyFrameObject *f, PyObject *Py_UNUSED(ignored))
{
    Py_ssize_t res, extras, ncells, nfrees;

    PyCodeObject *code = f->f_code;
    ncells = PyTuple_GET_SIZE(code->co_cellvars);
    nfrees = PyTuple_GET_SIZE(code->co_freevars);
    extras = code->co_stacksize + code->co_nlocals + ncells + nfrees;
    /* subtract one as it is already included in PyFrameObject */
    res = sizeof(PyFrameObject) + (extras-1) * sizeof(PyObject *);

    return PyLong_FromSsize_t(res);
}

PyDoc_STRVAR(sizeof__doc__,
"F.__sizeof__() -> size of F in memory, in bytes");

static PyObject *
frame_repr(PyFrameObject *f)
{
    int lineno = PyFrame_GetLineNumber(f);
    PyCodeObject *code = f->f_code;
    return PyUnicode_FromFormat(
        "<frame at %p, file %R, line %d, code %S>",
        f, code->co_filename, lineno, code->co_name);
}

static PyMethodDef frame_methods[] = {
    {"clear",           (PyCFunction)frame_clear,   METH_NOARGS,
     clear__doc__},
    {"__sizeof__",      (PyCFunction)frame_sizeof,  METH_NOARGS,
     sizeof__doc__},
    {NULL, NULL}    /* sentinel */
};

PyTypeObject PyFrame_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "frame",
    sizeof(PyFrameObject),
    sizeof(PyObject *),
    (destructor)frame_dealloc,                  /* tp_dealloc */
    0,                                          /* tp_vectorcall_offset */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_as_async */
    (reprfunc)frame_repr,                       /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    0,                                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    PyObject_GenericGetAttr,                    /* tp_getattro */
    PyObject_GenericSetAttr,                    /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,    /* tp_flags */
    0,                                          /* tp_doc */
    (traverseproc)frame_traverse,               /* tp_traverse */
    (inquiry)frame_tp_clear,                    /* tp_clear */
    0,                                          /* tp_richcompare */
    0,                                          /* tp_weaklistoffset */
    0,                                          /* tp_iter */
    0,                                          /* tp_iternext */
    frame_methods,                              /* tp_methods */
    frame_memberlist,                           /* tp_members */
    frame_getsetlist,                           /* tp_getset */
    0,                                          /* tp_base */
    0,                                          /* tp_dict */
};

_Py_IDENTIFIER(__builtins__);

static inline PyFrameObject*
frame_alloc(PyCodeObject *code)
{
    PyFrameObject *f;

    f = code->co_zombieframe;
    if (f != NULL) {
        code->co_zombieframe = NULL;
        _Py_NewReference((PyObject *)f);
        assert(f->f_code == code);
        return f;
    }

    Py_ssize_t ncells = PyTuple_GET_SIZE(code->co_cellvars);
    Py_ssize_t nfrees = PyTuple_GET_SIZE(code->co_freevars);
    Py_ssize_t extras = code->co_stacksize + code->co_nlocals + ncells + nfrees;
#if PyFrame_MAXFREELIST > 0
    if (free_list == NULL)
#endif
    {
        f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, extras);
        if (f == NULL) {
            return NULL;
        }
    }
#if PyFrame_MAXFREELIST > 0
    else {
        assert(numfree > 0);
        --numfree;
        f = free_list;
        free_list = free_list->f_back;
        if (Py_SIZE(f) < extras) {
            PyFrameObject *new_f = PyObject_GC_Resize(PyFrameObject, f, extras);
            if (new_f == NULL) {
                PyObject_GC_Del(f);
                return NULL;
            }
            f = new_f;
        }
        _Py_NewReference((PyObject *)f);
    }
#endif

    f->f_code = code;
    extras = code->co_nlocals + ncells + nfrees;
    f->f_valuestack = f->f_localsplus + extras;
    for (Py_ssize_t i=0; i<extras; i++) {
        f->f_localsplus[i] = NULL;
    }
    f->f_locals = NULL;
    f->f_trace = NULL;
    return f;
}


static inline PyObject *
frame_get_builtins(PyFrameObject *back, PyObject *globals)
{
    PyObject *builtins;

    if (back != NULL && back->f_globals == globals) {
        /* If we share the globals, we share the builtins.
           Save a lookup and a call. */
        builtins = back->f_builtins;
        assert(builtins != NULL);
        Py_INCREF(builtins);
        return builtins;
    }

    builtins = _PyDict_GetItemIdWithError(globals, &PyId___builtins__);
    if (builtins != NULL && PyModule_Check(builtins)) {
        builtins = PyModule_GetDict(builtins);
        assert(builtins != NULL);
    }
    if (builtins != NULL) {
        Py_INCREF(builtins);
        return builtins;
    }

    if (PyErr_Occurred()) {
        return NULL;
    }

    /* No builtins! Make up a minimal one.
       Give them 'None', at least. */
    builtins = PyDict_New();
    if (builtins == NULL) {
        return NULL;
    }
    if (PyDict_SetItemString(builtins, "None", Py_None) < 0) {
        Py_DECREF(builtins);
        return NULL;
    }
    return builtins;
}


PyFrameObject* _Py_HOT_FUNCTION
_PyFrame_New_NoTrack(PyThreadState *tstate, PyCodeObject *code,
                     PyObject *globals, PyObject *locals)
{
#ifdef Py_DEBUG
    if (code == NULL || globals == NULL || !PyDict_Check(globals) ||
        (locals != NULL && !PyMapping_Check(locals))) {
        PyErr_BadInternalCall();
        return NULL;
    }
#endif

    PyFrameObject *back = tstate->frame;
    PyObject *builtins = frame_get_builtins(back, globals);
    if (builtins == NULL) {
        return NULL;
    }

    PyFrameObject *f = frame_alloc(code);
    if (f == NULL) {
        Py_DECREF(builtins);
        return NULL;
    }

    f->f_stacktop = f->f_valuestack;
    f->f_builtins = builtins;
    Py_XINCREF(back);
    f->f_back = back;
    Py_INCREF(code);
    Py_INCREF(globals);
    f->f_globals = globals;
    /* Most functions have CO_NEWLOCALS and CO_OPTIMIZED set. */
    if ((code->co_flags & (CO_NEWLOCALS | CO_OPTIMIZED)) ==
        (CO_NEWLOCALS | CO_OPTIMIZED))
        ; /* f_locals = NULL; will be set by PyFrame_FastToLocals() */
    else if (code->co_flags & CO_NEWLOCALS) {
        locals = PyDict_New();
        if (locals == NULL) {
            Py_DECREF(f);
            return NULL;
        }
        f->f_locals = locals;
    }
    else {
        if (locals == NULL)
            locals = globals;
        Py_INCREF(locals);
        f->f_locals = locals;
    }

    f->f_lasti = -1;
    f->f_lineno = code->co_firstlineno;
    f->f_iblock = 0;
    f->f_executing = 0;
    f->f_gen = NULL;
    f->f_trace_opcodes = 0;
    f->f_trace_lines = 1;

    assert(f->f_code != NULL);

    return f;
}

PyFrameObject*
PyFrame_New(PyThreadState *tstate, PyCodeObject *code,
            PyObject *globals, PyObject *locals)
{
    PyFrameObject *f = _PyFrame_New_NoTrack(tstate, code, globals, locals);
    if (f)
        _PyObject_GC_TRACK(f);
    return f;
}


/* Block management */

void
PyFrame_BlockSetup(PyFrameObject *f, int type, int handler, int level)
{
    PyTryBlock *b;
    if (f->f_iblock >= CO_MAXBLOCKS) {
        Py_FatalError("block stack overflow");
    }
    b = &f->f_blockstack[f->f_iblock++];
    b->b_type = type;
    b->b_level = level;
    b->b_handler = handler;
}

PyTryBlock *
PyFrame_BlockPop(PyFrameObject *f)
{
    PyTryBlock *b;
    if (f->f_iblock <= 0) {
        Py_FatalError("block stack underflow");
    }
    b = &f->f_blockstack[--f->f_iblock];
    return b;
}

/* Convert between "fast" version of locals and dictionary version.

   map and values are input arguments.  map is a tuple of strings.
   values is an array of PyObject*.  At index i, map[i] is the name of
   the variable with value values[i].  The function copies the first
   nmap variables from map/values into dict.  If values[i] is NULL,
   the variable is deleted from dict.

   If deref is true, then the values being copied are cell variables
   and the value is extracted from the cell variable before being put
   in dict.
 */
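/* For example (illustrative values): with map == ("x", "y"), nmap == 2,
 * values == {NULL, obj} and deref == 0, map_to_dict() removes "x" from
 * dict if it is present and sets dict["y"] = obj.
 */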

static int
map_to_dict(PyObject *map, Py_ssize_t nmap, PyObject *dict, PyObject **values,
            int deref)
{
    Py_ssize_t j;
    assert(PyTuple_Check(map));
    assert(PyDict_Check(dict));
    assert(PyTuple_Size(map) >= nmap);
    for (j=0; j < nmap; j++) {
        PyObject *key = PyTuple_GET_ITEM(map, j);
        PyObject *value = values[j];
        assert(PyUnicode_Check(key));
        if (deref && value != NULL) {
            assert(PyCell_Check(value));
            value = PyCell_GET(value);
        }
        if (value == NULL) {
            if (PyObject_DelItem(dict, key) != 0) {
                if (PyErr_ExceptionMatches(PyExc_KeyError))
                    PyErr_Clear();
                else
                    return -1;
            }
        }
        else {
            if (PyObject_SetItem(dict, key, value) != 0)
                return -1;
        }
    }
    return 0;
}

/* Copy values from the "locals" dict into the fast locals.

   dict is an input argument containing string keys representing
   variables names and arbitrary PyObject* as values.

   map and values are input arguments.  map is a tuple of strings.
   values is an array of PyObject*.  At index i, map[i] is the name of
   the variable whose current fast value is values[i].  The function
   copies the first nmap variables from dict into values: values[i]
   is replaced by dict[map[i]].

   If deref is true, then the values being written are cell variables
   and the new value is stored into the cell rather than directly into
   values[i].  If clear is true, then variables in map but not in dict
   are set to NULL in values; if clear is false, variables missing in
   dict are ignored.

   Exceptions raised while reading the dict are silently ignored,
   because there is no good way to report them.
 */

static void
dict_to_map(PyObject *map, Py_ssize_t nmap, PyObject *dict, PyObject **values,
            int deref, int clear)
{
    Py_ssize_t j;
    assert(PyTuple_Check(map));
    assert(PyDict_Check(dict));
    assert(PyTuple_Size(map) >= nmap);
    for (j=0; j < nmap; j++) {
        PyObject *key = PyTuple_GET_ITEM(map, j);
        PyObject *value = PyObject_GetItem(dict, key);
        assert(PyUnicode_Check(key));
        /* We only care about NULLs if clear is true. */
        if (value == NULL) {
            PyErr_Clear();
            if (!clear)
                continue;
        }
        if (deref) {
            assert(PyCell_Check(values[j]));
            if (PyCell_GET(values[j]) != value) {
                if (PyCell_Set(values[j], value) < 0)
                    PyErr_Clear();
            }
        } else if (values[j] != value) {
            Py_XINCREF(value);
            Py_XSETREF(values[j], value);
        }
        Py_XDECREF(value);
    }
}

int
PyFrame_FastToLocalsWithError(PyFrameObject *f)
{
    /* Merge fast locals into f->f_locals */
    PyObject *locals, *map;
    PyObject **fast;
    PyCodeObject *co;
    Py_ssize_t j;
    Py_ssize_t ncells, nfreevars;

    if (f == NULL) {
        PyErr_BadInternalCall();
        return -1;
    }
    locals = f->f_locals;
    if (locals == NULL) {
        locals = f->f_locals = PyDict_New();
        if (locals == NULL)
            return -1;
    }
    co = f->f_code;
    map = co->co_varnames;
    if (!PyTuple_Check(map)) {
        PyErr_Format(PyExc_SystemError,
                     "co_varnames must be a tuple, not %s",
                     Py_TYPE(map)->tp_name);
        return -1;
    }
    fast = f->f_localsplus;
    j = PyTuple_GET_SIZE(map);
    if (j > co->co_nlocals)
        j = co->co_nlocals;
    if (co->co_nlocals) {
        if (map_to_dict(map, j, locals, fast, 0) < 0)
            return -1;
    }
    ncells = PyTuple_GET_SIZE(co->co_cellvars);
    nfreevars = PyTuple_GET_SIZE(co->co_freevars);
    if (ncells || nfreevars) {
        if (map_to_dict(co->co_cellvars, ncells,
                        locals, fast + co->co_nlocals, 1))
            return -1;

        /* If the namespace is unoptimized, then one of the
           following cases applies:
           1. It does not contain free variables, because it
              uses import * or is a top-level namespace.
           2. It is a class namespace.
           We don't want to accidentally copy free variables
           into the locals dict used by the class.
        */
        if (co->co_flags & CO_OPTIMIZED) {
            if (map_to_dict(co->co_freevars, nfreevars,
                            locals, fast + co->co_nlocals + ncells, 1) < 0)
                return -1;
        }
    }
    return 0;
}

void
PyFrame_FastToLocals(PyFrameObject *f)
{
    int res;

    assert(!PyErr_Occurred());

    res = PyFrame_FastToLocalsWithError(f);
    if (res < 0)
        PyErr_Clear();
}

void
PyFrame_LocalsToFast(PyFrameObject *f, int clear)
{
    /* Merge f->f_locals into fast locals */
    PyObject *locals, *map;
    PyObject **fast;
    PyObject *error_type, *error_value, *error_traceback;
    PyCodeObject *co;
    Py_ssize_t j;
    Py_ssize_t ncells, nfreevars;
    if (f == NULL)
        return;
    locals = f->f_locals;
    co = f->f_code;
    map = co->co_varnames;
    if (locals == NULL)
        return;
    if (!PyTuple_Check(map))
        return;
    PyErr_Fetch(&error_type, &error_value, &error_traceback);
    fast = f->f_localsplus;
    j = PyTuple_GET_SIZE(map);
    if (j > co->co_nlocals)
        j = co->co_nlocals;
    if (co->co_nlocals)
        dict_to_map(co->co_varnames, j, locals, fast, 0, clear);
    ncells = PyTuple_GET_SIZE(co->co_cellvars);
    nfreevars = PyTuple_GET_SIZE(co->co_freevars);
    if (ncells || nfreevars) {
        dict_to_map(co->co_cellvars, ncells,
                    locals, fast + co->co_nlocals, 1, clear);
        /* Same test as in PyFrame_FastToLocals() above. */
        if (co->co_flags & CO_OPTIMIZED) {
            dict_to_map(co->co_freevars, nfreevars,
                        locals, fast + co->co_nlocals + ncells, 1,
                        clear);
        }
    }
    PyErr_Restore(error_type, error_value, error_traceback);
}

/* Clear out the free list */
void
_PyFrame_ClearFreeList(void)
{
#if PyFrame_MAXFREELIST > 0
    while (free_list != NULL) {
        PyFrameObject *f = free_list;
        free_list = free_list->f_back;
        PyObject_GC_Del(f);
        --numfree;
    }
    assert(numfree == 0);
#endif
}

void
_PyFrame_Fini(void)
{
    _PyFrame_ClearFreeList();
}

/* Print summary info about the state of the optimized allocator */
void
_PyFrame_DebugMallocStats(FILE *out)
{
#if PyFrame_MAXFREELIST > 0
    _PyDebugAllocatorStats(out,
                           "free PyFrameObject",
                           numfree, sizeof(PyFrameObject));
#endif
}


PyCodeObject *
PyFrame_GetCode(PyFrameObject *frame)
{
    assert(frame != NULL);
    PyCodeObject *code = frame->f_code;
    assert(code != NULL);
    Py_INCREF(code);
    return code;
}


PyFrameObject*
PyFrame_GetBack(PyFrameObject *frame)
{
    assert(frame != NULL);
    PyFrameObject *back = frame->f_back;
    Py_XINCREF(back);
    return back;
}