1 // This file contains instruction definitions.
2 // It is read by generators stored in Tools/cases_generator/
3 // to generate Python/generated_cases.c.h and others.
4 // Note that there is some dummy C code at the top and bottom of the file
5 // to fool text editors like VS Code into believing this is valid C code.
6 // The actual instruction definitions start at // BEGIN BYTECODES //.
7 // See Tools/cases_generator/README.md for more information.
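//
// A rough sketch of the notation used below (the README above is the
// authoritative reference): `inst(NAME, (a, b -- r))` defines an instruction
// that pops `b` (the top of stack) and `a` and pushes `r`; entries written
// `name/N` (such as `counter/1` or `unused/2`) denote N 16-bit inline cache
// units rather than stack items. `op(...)` defines a reusable piece,
// `macro(...)` composes ops and cache padding into a full instruction, and
// `family(...)` groups an adaptive instruction with its specializations.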
8
9 #include "Python.h"
10 #include "pycore_abstract.h" // _PyIndex_Check()
11 #include "pycore_backoff.h"
12 #include "pycore_cell.h" // PyCell_GetRef()
13 #include "pycore_code.h"
14 #include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS
15 #include "pycore_function.h"
16 #include "pycore_instruments.h"
17 #include "pycore_intrinsics.h"
18 #include "pycore_long.h" // _PyLong_GetZero()
19 #include "pycore_moduleobject.h" // PyModuleObject
20 #include "pycore_object.h" // _PyObject_GC_TRACK()
21 #include "pycore_opcode_metadata.h" // uop names
22 #include "pycore_opcode_utils.h" // MAKE_FUNCTION_*
23 #include "pycore_pyatomic_ft_wrappers.h" // FT_ATOMIC_*
24 #include "pycore_pyerrors.h" // _PyErr_GetRaisedException()
25 #include "pycore_pystate.h" // _PyInterpreterState_GET()
26 #include "pycore_range.h" // _PyRangeIterObject
27 #include "pycore_setobject.h" // _PySet_NextEntry()
28 #include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs
29 #include "pycore_sysmodule.h" // _PySys_Audit()
30 #include "pycore_tuple.h" // _PyTuple_ITEMS()
31 #include "pycore_typeobject.h" // _PySuper_Lookup()
32
33 #include "pycore_dict.h"
34 #include "dictobject.h"
35 #include "pycore_frame.h"
36 #include "opcode.h"
37 #include "optimizer.h"
38 #include "pydtrace.h"
39 #include "setobject.h"
40
41
42 #define USE_COMPUTED_GOTOS 0
43 #include "ceval_macros.h"
44
45 /* Flow control macros */
46 #define GO_TO_INSTRUCTION(instname) ((void)0)
47
48 #define inst(name, ...) case name:
49 #define op(name, ...) /* NAME is ignored */
50 #define macro(name) static int MACRO_##name
51 #define super(name) static int SUPER_##name
52 #define family(name, ...) static int family_##name
53 #define pseudo(name) static int pseudo_##name
54
55 /* Annotations */
56 #define guard
57 #define override
58 #define specializing
59 #define split
60 #define replicate(TIMES)
61
62 // Dummy variables for stack effects.
63 static PyObject *value, *value1, *value2, *left, *right, *res, *sum, *prod, *sub;
64 static PyObject *container, *start, *stop, *v, *lhs, *rhs, *res2;
65 static PyObject *list, *tuple, *dict, *owner, *set, *str, *tup, *map, *keys;
66 static PyObject *exit_func, *lasti, *val, *retval, *obj, *iter, *exhausted;
67 static PyObject *aiter, *awaitable, *iterable, *w, *exc_value, *bc, *locals;
68 static PyObject *orig, *excs, *update, *b, *fromlist, *level, *from;
69 static PyObject **pieces, **values;
70 static size_t jump;
71 // Dummy variables for cache effects
72 static uint16_t invert, counter, index, hint;
73 #define unused 0 // Used in a macro def, can't be static
74 static uint32_t type_version;
75 static _PyExecutorObject *current_executor;
76
77 static PyObject *
78 dummy_func(
79 PyThreadState *tstate,
80 _PyInterpreterFrame *frame,
81 unsigned char opcode,
82 unsigned int oparg,
83 _Py_CODEUNIT *next_instr,
84 PyObject **stack_pointer,
85 int throwflag,
86 PyObject *args[]
87 )
88 {
89 // Dummy labels.
90 pop_1_error:
91 // Dummy locals.
92 PyObject *dummy;
93 _Py_CODEUNIT *this_instr;
94 PyObject *attr;
95 PyObject *attrs;
96 PyObject *bottom;
97 PyObject *callable;
98 PyObject *callargs;
99 PyObject *codeobj;
100 PyObject *cond;
101 PyObject *descr;
102 _PyInterpreterFrame entry_frame;
103 PyObject *exc;
104 PyObject *exit;
105 PyObject *fget;
106 PyObject *fmt_spec;
107 PyObject *func;
108 uint32_t func_version;
109 PyObject *getattribute;
110 PyObject *kwargs;
111 PyObject *kwdefaults;
112 PyObject *len_o;
113 PyObject *match;
114 PyObject *match_type;
115 PyObject *method;
116 PyObject *mgr;
117 Py_ssize_t min_args;
118 PyObject *names;
119 PyObject *new_exc;
120 PyObject *next;
121 PyObject *none;
122 PyObject *null;
123 PyObject *prev_exc;
124 PyObject *receiver;
125 PyObject *rest;
126 int result;
127 PyObject *self;
128 PyObject *seq;
129 PyObject *slice;
130 PyObject *step;
131 PyObject *subject;
132 PyObject *top;
133 PyObject *type;
134 PyObject *typevars;
135 PyObject *val0;
136 PyObject *val1;
137 int values_or_none;
138
139 switch (opcode) {
140
141 // BEGIN BYTECODES //
142 pure inst(NOP, (--)) {
143 }
144
145 family(RESUME, 0) = {
146 RESUME_CHECK,
147 };
148
149 tier1 inst(RESUME, (--)) {
150 assert(frame == tstate->current_frame);
151 if (tstate->tracing == 0) {
152 uintptr_t global_version =
153 _Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) &
154 ~_PY_EVAL_EVENTS_MASK;
155 PyCodeObject* code = _PyFrame_GetCode(frame);
156 uintptr_t code_version = FT_ATOMIC_LOAD_UINTPTR_ACQUIRE(code->_co_instrumentation_version);
157 assert((code_version & 255) == 0);
158 if (code_version != global_version) {
159 int err = _Py_Instrument(_PyFrame_GetCode(frame), tstate->interp);
160 ERROR_IF(err, error);
161 next_instr = this_instr;
162 DISPATCH();
163 }
164 assert(this_instr->op.code == RESUME ||
165 this_instr->op.code == RESUME_CHECK ||
166 this_instr->op.code == INSTRUMENTED_RESUME ||
167 this_instr->op.code == ENTER_EXECUTOR);
168 if (this_instr->op.code == RESUME) {
169 #if ENABLE_SPECIALIZATION
170 FT_ATOMIC_STORE_UINT8_RELAXED(this_instr->op.code, RESUME_CHECK);
171 #endif /* ENABLE_SPECIALIZATION */
172 }
173 }
174 if ((oparg & RESUME_OPARG_LOCATION_MASK) < RESUME_AFTER_YIELD_FROM) {
175 CHECK_EVAL_BREAKER();
176 }
177 }
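// RESUME compares the code object's instrumentation version against the
// global version kept in the upper (non-flag) bits of tstate->eval_breaker,
// re-instrumenting and retrying the instruction when they differ.
// RESUME_CHECK (below) folds both checks into one word comparison: any
// pending eval-breaker bit or instrumentation change makes the comparison
// fail and deoptimizes back to RESUME.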
178
179 inst(RESUME_CHECK, (--)) {
180 #if defined(__EMSCRIPTEN__)
181 DEOPT_IF(_Py_emscripten_signal_clock == 0);
182 _Py_emscripten_signal_clock -= Py_EMSCRIPTEN_SIGNAL_HANDLING;
183 #endif
184 uintptr_t eval_breaker = _Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker);
185 uintptr_t version = FT_ATOMIC_LOAD_UINTPTR_ACQUIRE(_PyFrame_GetCode(frame)->_co_instrumentation_version);
186 assert((version & _PY_EVAL_EVENTS_MASK) == 0);
187 DEOPT_IF(eval_breaker != version);
188 }
189
190 inst(INSTRUMENTED_RESUME, (--)) {
191 uintptr_t global_version = _Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & ~_PY_EVAL_EVENTS_MASK;
192 uintptr_t code_version = FT_ATOMIC_LOAD_UINTPTR_ACQUIRE(_PyFrame_GetCode(frame)->_co_instrumentation_version);
193 if (code_version != global_version && tstate->tracing == 0) {
194 if (_Py_Instrument(_PyFrame_GetCode(frame), tstate->interp)) {
195 ERROR_NO_POP();
196 }
197 next_instr = this_instr;
198 }
199 else {
200 if ((oparg & RESUME_OPARG_LOCATION_MASK) < RESUME_AFTER_YIELD_FROM) {
201 CHECK_EVAL_BREAKER();
202 }
203 _PyFrame_SetStackPointer(frame, stack_pointer);
204 int err = _Py_call_instrumentation(
205 tstate, oparg > 0, frame, this_instr);
206 stack_pointer = _PyFrame_GetStackPointer(frame);
207 ERROR_IF(err, error);
208 if (frame->instr_ptr != this_instr) {
209 /* Instrumentation has jumped */
210 next_instr = frame->instr_ptr;
211 DISPATCH();
212 }
213 }
214 }
215
216 pseudo(LOAD_CLOSURE) = {
217 LOAD_FAST,
218 };
219
220 inst(LOAD_FAST_CHECK, (-- value)) {
221 value = GETLOCAL(oparg);
222 if (value == NULL) {
223 _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError,
224 UNBOUNDLOCAL_ERROR_MSG,
225 PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg)
226 );
227 ERROR_IF(1, error);
228 }
229 Py_INCREF(value);
230 }
231
232 replicate(8) pure inst(LOAD_FAST, (-- value)) {
233 value = GETLOCAL(oparg);
234 assert(value != NULL);
235 Py_INCREF(value);
236 }
237
238 inst(LOAD_FAST_AND_CLEAR, (-- value)) {
239 value = GETLOCAL(oparg);
240 // Do not use SETLOCAL here; it decrefs the old value.
241 GETLOCAL(oparg) = NULL;
242 }
243
244 inst(LOAD_FAST_LOAD_FAST, ( -- value1, value2)) {
245 uint32_t oparg1 = oparg >> 4;
246 uint32_t oparg2 = oparg & 15;
247 value1 = GETLOCAL(oparg1);
248 value2 = GETLOCAL(oparg2);
249 Py_INCREF(value1);
250 Py_INCREF(value2);
251 }
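// The oparg packs two local indexes: the first in the high four bits, the
// second in the low four bits, so only locals 0..15 can be paired this way.
// For example, `x + y` in a function body will typically compile to a single
// LOAD_FAST_LOAD_FAST followed by BINARY_OP.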
252
253 pure inst(LOAD_CONST, (-- value)) {
254 value = GETITEM(FRAME_CO_CONSTS, oparg);
255 Py_INCREF(value);
256 }
257
258 replicate(8) inst(STORE_FAST, (value --)) {
259 SETLOCAL(oparg, value);
260 }
261
262 pseudo(STORE_FAST_MAYBE_NULL) = {
263 STORE_FAST,
264 };
265
266 inst(STORE_FAST_LOAD_FAST, (value1 -- value2)) {
267 uint32_t oparg1 = oparg >> 4;
268 uint32_t oparg2 = oparg & 15;
269 SETLOCAL(oparg1, value1);
270 value2 = GETLOCAL(oparg2);
271 Py_INCREF(value2);
272 }
273
274 inst(STORE_FAST_STORE_FAST, (value2, value1 --)) {
275 uint32_t oparg1 = oparg >> 4;
276 uint32_t oparg2 = oparg & 15;
277 SETLOCAL(oparg1, value1);
278 SETLOCAL(oparg2, value2);
279 }
280
281 pure inst(POP_TOP, (value --)) {
282 DECREF_INPUTS();
283 }
284
285 pure inst(PUSH_NULL, (-- res)) {
286 res = NULL;
287 }
288
289 macro(END_FOR) = POP_TOP;
290
291 tier1 inst(INSTRUMENTED_END_FOR, (receiver, value -- receiver)) {
292 /* Need to create a fake StopIteration error here,
293 * to conform to PEP 380 */
294 if (PyGen_Check(receiver)) {
295 if (monitor_stop_iteration(tstate, frame, this_instr, value)) {
296 ERROR_NO_POP();
297 }
298 }
299 DECREF_INPUTS();
300 }
301
302 pure inst(END_SEND, (receiver, value -- value)) {
303 Py_DECREF(receiver);
304 }
305
306 tier1 inst(INSTRUMENTED_END_SEND, (receiver, value -- value)) {
307 if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) {
308 if (monitor_stop_iteration(tstate, frame, this_instr, value)) {
309 ERROR_NO_POP();
310 }
311 }
312 Py_DECREF(receiver);
313 }
314
315 inst(UNARY_NEGATIVE, (value -- res)) {
316 res = PyNumber_Negative(value);
317 DECREF_INPUTS();
318 ERROR_IF(res == NULL, error);
319 }
320
321 pure inst(UNARY_NOT, (value -- res)) {
322 assert(PyBool_Check(value));
323 res = Py_IsFalse(value) ? Py_True : Py_False;
324 }
325
326 family(TO_BOOL, INLINE_CACHE_ENTRIES_TO_BOOL) = {
327 TO_BOOL_ALWAYS_TRUE,
328 TO_BOOL_BOOL,
329 TO_BOOL_INT,
330 TO_BOOL_LIST,
331 TO_BOOL_NONE,
332 TO_BOOL_STR,
333 };
334
335 specializing op(_SPECIALIZE_TO_BOOL, (counter/1, value -- value)) {
336 #if ENABLE_SPECIALIZATION
337 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
338 next_instr = this_instr;
339 _Py_Specialize_ToBool(value, next_instr);
340 DISPATCH_SAME_OPARG();
341 }
342 STAT_INC(TO_BOOL, deferred);
343 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
344 #endif /* ENABLE_SPECIALIZATION */
345 }
346
347 op(_TO_BOOL, (value -- res)) {
348 int err = PyObject_IsTrue(value);
349 DECREF_INPUTS();
350 ERROR_IF(err < 0, error);
351 res = err ? Py_True : Py_False;
352 }
353
354 macro(TO_BOOL) = _SPECIALIZE_TO_BOOL + unused/2 + _TO_BOOL;
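// TO_BOOL shows the adaptive-specialization pattern used throughout this
// file: the generic instruction carries an inline-cache counter; when the
// counter triggers, _Py_Specialize_ToBool() rewrites the opcode in place to
// one of the variants listed in the family above, and each variant begins
// with cheap guards (DEOPT_IF/EXIT_IF) that fall back to the generic
// instruction when its assumptions stop holding.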
355
356 inst(TO_BOOL_BOOL, (unused/1, unused/2, value -- value)) {
357 EXIT_IF(!PyBool_Check(value));
358 STAT_INC(TO_BOOL, hit);
359 }
360
361 inst(TO_BOOL_INT, (unused/1, unused/2, value -- res)) {
362 EXIT_IF(!PyLong_CheckExact(value));
363 STAT_INC(TO_BOOL, hit);
364 if (_PyLong_IsZero((PyLongObject *)value)) {
365 assert(_Py_IsImmortalLoose(value));
366 res = Py_False;
367 }
368 else {
369 DECREF_INPUTS();
370 res = Py_True;
371 }
372 }
373
374 inst(TO_BOOL_LIST, (unused/1, unused/2, value -- res)) {
375 EXIT_IF(!PyList_CheckExact(value));
376 STAT_INC(TO_BOOL, hit);
377 res = Py_SIZE(value) ? Py_True : Py_False;
378 DECREF_INPUTS();
379 }
380
381 inst(TO_BOOL_NONE, (unused/1, unused/2, value -- res)) {
382 // This one is a bit weird, because we expect *some* failures:
383 EXIT_IF(!Py_IsNone(value));
384 STAT_INC(TO_BOOL, hit);
385 res = Py_False;
386 }
387
388 inst(TO_BOOL_STR, (unused/1, unused/2, value -- res)) {
389 EXIT_IF(!PyUnicode_CheckExact(value));
390 STAT_INC(TO_BOOL, hit);
391 if (value == &_Py_STR(empty)) {
392 assert(_Py_IsImmortalLoose(value));
393 res = Py_False;
394 }
395 else {
396 assert(Py_SIZE(value));
397 DECREF_INPUTS();
398 res = Py_True;
399 }
400 }
401
402 op(_REPLACE_WITH_TRUE, (value -- res)) {
403 Py_DECREF(value);
404 res = Py_True;
405 }
406
407 macro(TO_BOOL_ALWAYS_TRUE) =
408 unused/1 +
409 _GUARD_TYPE_VERSION +
410 _REPLACE_WITH_TRUE;
411
412 inst(UNARY_INVERT, (value -- res)) {
413 res = PyNumber_Invert(value);
414 DECREF_INPUTS();
415 ERROR_IF(res == NULL, error);
416 }
417
418 family(BINARY_OP, INLINE_CACHE_ENTRIES_BINARY_OP) = {
419 BINARY_OP_MULTIPLY_INT,
420 BINARY_OP_ADD_INT,
421 BINARY_OP_SUBTRACT_INT,
422 BINARY_OP_MULTIPLY_FLOAT,
423 BINARY_OP_ADD_FLOAT,
424 BINARY_OP_SUBTRACT_FLOAT,
425 BINARY_OP_ADD_UNICODE,
426 // BINARY_OP_INPLACE_ADD_UNICODE, // See comments at that opcode.
427 };
428
429 op(_GUARD_BOTH_INT, (left, right -- left, right)) {
430 EXIT_IF(!PyLong_CheckExact(left));
431 EXIT_IF(!PyLong_CheckExact(right));
432 }
433
434 op(_GUARD_NOS_INT, (left, unused -- left, unused)) {
435 EXIT_IF(!PyLong_CheckExact(left));
436 }
437
438 op(_GUARD_TOS_INT, (value -- value)) {
439 EXIT_IF(!PyLong_CheckExact(value));
440 }
441
442 pure op(_BINARY_OP_MULTIPLY_INT, (left, right -- res)) {
443 STAT_INC(BINARY_OP, hit);
444 res = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right);
445 _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
446 _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free);
447 ERROR_IF(res == NULL, error);
448 }
449
450 pure op(_BINARY_OP_ADD_INT, (left, right -- res)) {
451 STAT_INC(BINARY_OP, hit);
452 res = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right);
453 _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
454 _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free);
455 ERROR_IF(res == NULL, error);
456 }
457
458 pure op(_BINARY_OP_SUBTRACT_INT, (left, right -- res)) {
459 STAT_INC(BINARY_OP, hit);
460 res = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right);
461 _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
462 _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free);
463 ERROR_IF(res == NULL, error);
464 }
465
466 macro(BINARY_OP_MULTIPLY_INT) =
467 _GUARD_BOTH_INT + unused/1 + _BINARY_OP_MULTIPLY_INT;
468 macro(BINARY_OP_ADD_INT) =
469 _GUARD_BOTH_INT + unused/1 + _BINARY_OP_ADD_INT;
470 macro(BINARY_OP_SUBTRACT_INT) =
471 _GUARD_BOTH_INT + unused/1 + _BINARY_OP_SUBTRACT_INT;
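// For example, a BINARY_OP that keeps seeing two exact ints (say, `a + b`
// in a hot function) may be rewritten to BINARY_OP_ADD_INT; _GUARD_BOTH_INT
// then deoptimizes the first time either operand is not an exact int, such
// as a float or an int subclass.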
472
473 op(_GUARD_BOTH_FLOAT, (left, right -- left, right)) {
474 EXIT_IF(!PyFloat_CheckExact(left));
475 EXIT_IF(!PyFloat_CheckExact(right));
476 }
477
478 op(_GUARD_NOS_FLOAT, (left, unused -- left, unused)) {
479 EXIT_IF(!PyFloat_CheckExact(left));
480 }
481
482 op(_GUARD_TOS_FLOAT, (value -- value)) {
483 EXIT_IF(!PyFloat_CheckExact(value));
484 }
485
486 pure op(_BINARY_OP_MULTIPLY_FLOAT, (left, right -- res)) {
487 STAT_INC(BINARY_OP, hit);
488 double dres =
489 ((PyFloatObject *)left)->ob_fval *
490 ((PyFloatObject *)right)->ob_fval;
491 DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dres, res);
492 }
493
494 pure op(_BINARY_OP_ADD_FLOAT, (left, right -- res)) {
495 STAT_INC(BINARY_OP, hit);
496 double dres =
497 ((PyFloatObject *)left)->ob_fval +
498 ((PyFloatObject *)right)->ob_fval;
499 DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dres, res);
500 }
501
502 pure op(_BINARY_OP_SUBTRACT_FLOAT, (left, right -- res)) {
503 STAT_INC(BINARY_OP, hit);
504 double dres =
505 ((PyFloatObject *)left)->ob_fval -
506 ((PyFloatObject *)right)->ob_fval;
507 DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dres, res);
508 }
509
510 macro(BINARY_OP_MULTIPLY_FLOAT) =
511 _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_MULTIPLY_FLOAT;
512 macro(BINARY_OP_ADD_FLOAT) =
513 _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_ADD_FLOAT;
514 macro(BINARY_OP_SUBTRACT_FLOAT) =
515 _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_SUBTRACT_FLOAT;
516
517 op(_GUARD_BOTH_UNICODE, (left, right -- left, right)) {
518 EXIT_IF(!PyUnicode_CheckExact(left));
519 EXIT_IF(!PyUnicode_CheckExact(right));
520 }
521
522 pure op(_BINARY_OP_ADD_UNICODE, (left, right -- res)) {
523 STAT_INC(BINARY_OP, hit);
524 res = PyUnicode_Concat(left, right);
525 _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc);
526 _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc);
527 ERROR_IF(res == NULL, error);
528 }
529
530 macro(BINARY_OP_ADD_UNICODE) =
531 _GUARD_BOTH_UNICODE + unused/1 + _BINARY_OP_ADD_UNICODE;
532
533 // This is a subtle one. It's a super-instruction for
534 // BINARY_OP_ADD_UNICODE followed by STORE_FAST
535 // where the store goes into the left argument.
536 // So the inputs are the same as for all BINARY_OP
537 // specializations, but there is no output.
538 // At the end we just skip over the STORE_FAST.
539 tier1 op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) {
540 assert(next_instr->op.code == STORE_FAST);
541 PyObject **target_local = &GETLOCAL(next_instr->op.arg);
542 DEOPT_IF(*target_local != left);
543 STAT_INC(BINARY_OP, hit);
544 /* Handle `left = left + right` or `left += right` for str.
545 *
546 * When possible, extend `left` in place rather than
547 * allocating a new PyUnicodeObject. This avoids the quadratic
548 * behavior of building a string by repeated `+=` instead of str.join().
549 *
550 * If `left` has only two references remaining (one from
551 * the stack, one in the locals), DECREFing `left` leaves
552 * only the locals reference, so PyUnicode_Append knows
553 * that the string is safe to mutate.
554 */
555 assert(Py_REFCNT(left) >= 2);
556 _Py_DECREF_NO_DEALLOC(left);
557 PyUnicode_Append(target_local, right);
558 _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc);
559 ERROR_IF(*target_local == NULL, error);
560 // The STORE_FAST is already done.
561 assert(next_instr->op.code == STORE_FAST);
562 SKIP_OVER(1);
563 }
564
565 macro(BINARY_OP_INPLACE_ADD_UNICODE) =
566 _GUARD_BOTH_UNICODE + unused/1 + _BINARY_OP_INPLACE_ADD_UNICODE;
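// For example, building a string with repeated appends to a local, e.g.
//     for chunk in chunks:
//         s += chunk
// is the pattern this super-instruction targets: when the guards hold,
// `s` is typically grown in place instead of reallocated each iteration.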
567
568 family(BINARY_SUBSCR, INLINE_CACHE_ENTRIES_BINARY_SUBSCR) = {
569 BINARY_SUBSCR_DICT,
570 BINARY_SUBSCR_GETITEM,
571 BINARY_SUBSCR_LIST_INT,
572 BINARY_SUBSCR_STR_INT,
573 BINARY_SUBSCR_TUPLE_INT,
574 };
575
576 specializing op(_SPECIALIZE_BINARY_SUBSCR, (counter/1, container, sub -- container, sub)) {
577 #if ENABLE_SPECIALIZATION
578 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
579 next_instr = this_instr;
580 _Py_Specialize_BinarySubscr(container, sub, next_instr);
581 DISPATCH_SAME_OPARG();
582 }
583 STAT_INC(BINARY_SUBSCR, deferred);
584 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
585 #endif /* ENABLE_SPECIALIZATION */
586 }
587
588 op(_BINARY_SUBSCR, (container, sub -- res)) {
589 res = PyObject_GetItem(container, sub);
590 DECREF_INPUTS();
591 ERROR_IF(res == NULL, error);
592 }
593
594 macro(BINARY_SUBSCR) = _SPECIALIZE_BINARY_SUBSCR + _BINARY_SUBSCR;
595
596 inst(BINARY_SLICE, (container, start, stop -- res)) {
597 PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop);
598 // Can't use ERROR_IF() here, because we haven't
599 // DECREF'ed container yet, and we still own slice.
600 if (slice == NULL) {
601 res = NULL;
602 }
603 else {
604 res = PyObject_GetItem(container, slice);
605 Py_DECREF(slice);
606 }
607 Py_DECREF(container);
608 ERROR_IF(res == NULL, error);
609 }
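// BINARY_SLICE and STORE_SLICE (below) handle two-argument slices such as
// `container[start:stop]` and `container[start:stop] = v`; slices with an
// explicit step are typically compiled to BUILD_SLICE plus the generic
// subscript instructions instead.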
610
611 inst(STORE_SLICE, (v, container, start, stop -- )) {
612 PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop);
613 int err;
614 if (slice == NULL) {
615 err = 1;
616 }
617 else {
618 err = PyObject_SetItem(container, slice, v);
619 Py_DECREF(slice);
620 }
621 Py_DECREF(v);
622 Py_DECREF(container);
623 ERROR_IF(err, error);
624 }
625
626 inst(BINARY_SUBSCR_LIST_INT, (unused/1, list, sub -- res)) {
627 DEOPT_IF(!PyLong_CheckExact(sub));
628 DEOPT_IF(!PyList_CheckExact(list));
629
630 // Deopt unless 0 <= sub < PyList_Size(list)
631 DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub));
632 Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0];
633 DEOPT_IF(index >= PyList_GET_SIZE(list));
634 STAT_INC(BINARY_SUBSCR, hit);
635 res = PyList_GET_ITEM(list, index);
636 assert(res != NULL);
637 Py_INCREF(res);
638 _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free);
639 Py_DECREF(list);
640 }
641
642 inst(BINARY_SUBSCR_STR_INT, (unused/1, str, sub -- res)) {
643 DEOPT_IF(!PyLong_CheckExact(sub));
644 DEOPT_IF(!PyUnicode_CheckExact(str));
645 DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub));
646 Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0];
647 DEOPT_IF(PyUnicode_GET_LENGTH(str) <= index);
648 // Specialize for reading an ASCII character from any string:
649 Py_UCS4 c = PyUnicode_READ_CHAR(str, index);
650 DEOPT_IF(Py_ARRAY_LENGTH(_Py_SINGLETON(strings).ascii) <= c);
651 STAT_INC(BINARY_SUBSCR, hit);
652 res = (PyObject*)&_Py_SINGLETON(strings).ascii[c];
653 _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free);
654 Py_DECREF(str);
655 }
656
657 inst(BINARY_SUBSCR_TUPLE_INT, (unused/1, tuple, sub -- res)) {
658 DEOPT_IF(!PyLong_CheckExact(sub));
659 DEOPT_IF(!PyTuple_CheckExact(tuple));
660
661 // Deopt unless 0 <= sub < PyTuple_GET_SIZE(tuple)
662 DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub));
663 Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0];
664 DEOPT_IF(index >= PyTuple_GET_SIZE(tuple));
665 STAT_INC(BINARY_SUBSCR, hit);
666 res = PyTuple_GET_ITEM(tuple, index);
667 assert(res != NULL);
668 Py_INCREF(res);
669 _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free);
670 Py_DECREF(tuple);
671 }
672
673 inst(BINARY_SUBSCR_DICT, (unused/1, dict, sub -- res)) {
674 DEOPT_IF(!PyDict_CheckExact(dict));
675 STAT_INC(BINARY_SUBSCR, hit);
676 int rc = PyDict_GetItemRef(dict, sub, &res);
677 if (rc == 0) {
678 _PyErr_SetKeyError(sub);
679 }
680 DECREF_INPUTS();
681 ERROR_IF(rc <= 0, error); // not found or error
682 }
683
684 inst(BINARY_SUBSCR_GETITEM, (unused/1, container, sub -- unused)) {
685 DEOPT_IF(tstate->interp->eval_frame);
686 PyTypeObject *tp = Py_TYPE(container);
687 DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE));
688 PyHeapTypeObject *ht = (PyHeapTypeObject *)tp;
689 PyObject *cached = ht->_spec_cache.getitem;
690 DEOPT_IF(cached == NULL);
691 assert(PyFunction_Check(cached));
692 PyFunctionObject *getitem = (PyFunctionObject *)cached;
693 uint32_t cached_version = ht->_spec_cache.getitem_version;
694 DEOPT_IF(getitem->func_version != cached_version);
695 PyCodeObject *code = (PyCodeObject *)getitem->func_code;
696 assert(code->co_argcount == 2);
697 DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize));
698 STAT_INC(BINARY_SUBSCR, hit);
699 Py_INCREF(getitem);
700 _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, getitem, 2);
701 STACK_SHRINK(2);
702 new_frame->localsplus[0] = container;
703 new_frame->localsplus[1] = sub;
704 frame->return_offset = (uint16_t)(next_instr - this_instr);
705 DISPATCH_INLINED(new_frame);
706 }
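// Rather than calling a pure-Python __getitem__ through the C API, this
// specialization pushes the method's frame directly (DISPATCH_INLINED), so
// the call runs as an ordinary Python-to-Python call without growing the
// C stack.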
707
708 inst(LIST_APPEND, (list, unused[oparg-1], v -- list, unused[oparg-1])) {
709 ERROR_IF(_PyList_AppendTakeRef((PyListObject *)list, v) < 0, error);
710 }
711
712 inst(SET_ADD, (set, unused[oparg-1], v -- set, unused[oparg-1])) {
713 int err = PySet_Add(set, v);
714 DECREF_INPUTS();
715 ERROR_IF(err, error);
716 }
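// LIST_APPEND and SET_ADD are emitted for list and set comprehensions,
// e.g. `[f(x) for x in it]`; oparg says how far below the appended value
// the container sits on the stack, so the `unused[oparg-1]` items in
// between are left untouched.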
717
718 family(STORE_SUBSCR, INLINE_CACHE_ENTRIES_STORE_SUBSCR) = {
719 STORE_SUBSCR_DICT,
720 STORE_SUBSCR_LIST_INT,
721 };
722
723 specializing op(_SPECIALIZE_STORE_SUBSCR, (counter/1, container, sub -- container, sub)) {
724 #if ENABLE_SPECIALIZATION
725 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
726 next_instr = this_instr;
727 _Py_Specialize_StoreSubscr(container, sub, next_instr);
728 DISPATCH_SAME_OPARG();
729 }
730 STAT_INC(STORE_SUBSCR, deferred);
731 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
732 #endif /* ENABLE_SPECIALIZATION */
733 }
734
735 op(_STORE_SUBSCR, (v, container, sub -- )) {
736 /* container[sub] = v */
737 int err = PyObject_SetItem(container, sub, v);
738 DECREF_INPUTS();
739 ERROR_IF(err, error);
740 }
741
742 macro(STORE_SUBSCR) = _SPECIALIZE_STORE_SUBSCR + _STORE_SUBSCR;
743
744 inst(STORE_SUBSCR_LIST_INT, (unused/1, value, list, sub -- )) {
745 DEOPT_IF(!PyLong_CheckExact(sub));
746 DEOPT_IF(!PyList_CheckExact(list));
747
748 // Ensure nonnegative, zero-or-one-digit ints.
749 DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub));
750 Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0];
751 // Ensure index < len(list)
752 DEOPT_IF(index >= PyList_GET_SIZE(list));
753 STAT_INC(STORE_SUBSCR, hit);
754
755 PyObject *old_value = PyList_GET_ITEM(list, index);
756 PyList_SET_ITEM(list, index, value);
757 assert(old_value != NULL);
758 Py_DECREF(old_value);
759 _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free);
760 Py_DECREF(list);
761 }
762
763 inst(STORE_SUBSCR_DICT, (unused/1, value, dict, sub -- )) {
764 DEOPT_IF(!PyDict_CheckExact(dict));
765 STAT_INC(STORE_SUBSCR, hit);
766 int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value);
767 Py_DECREF(dict);
768 ERROR_IF(err, error);
769 }
770
771 inst(DELETE_SUBSCR, (container, sub --)) {
772 /* del container[sub] */
773 int err = PyObject_DelItem(container, sub);
774 DECREF_INPUTS();
775 ERROR_IF(err, error);
776 }
777
778 inst(CALL_INTRINSIC_1, (value -- res)) {
779 assert(oparg <= MAX_INTRINSIC_1);
780 res = _PyIntrinsics_UnaryFunctions[oparg].func(tstate, value);
781 DECREF_INPUTS();
782 ERROR_IF(res == NULL, error);
783 }
784
785 inst(CALL_INTRINSIC_2, (value2, value1 -- res)) {
786 assert(oparg <= MAX_INTRINSIC_2);
787 res = _PyIntrinsics_BinaryFunctions[oparg].func(tstate, value2, value1);
788 DECREF_INPUTS();
789 ERROR_IF(res == NULL, error);
790 }
791
792 tier1 inst(RAISE_VARARGS, (args[oparg] -- )) {
793 PyObject *cause = NULL, *exc = NULL;
794 switch (oparg) {
795 case 2:
796 cause = args[1];
797 /* fall through */
798 case 1:
799 exc = args[0];
800 /* fall through */
801 case 0:
802 if (do_raise(tstate, exc, cause)) {
803 assert(oparg == 0);
804 monitor_reraise(tstate, frame, this_instr);
805 goto exception_unwind;
806 }
807 break;
808 default:
809 _PyErr_SetString(tstate, PyExc_SystemError,
810 "bad RAISE_VARARGS oparg");
811 break;
812 }
813 ERROR_IF(true, error);
814 }
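// oparg encodes the form of the raise statement: 0 for a bare `raise`
// (re-raise the active exception), 1 for `raise exc`, and 2 for
// `raise exc from cause`. The switch above deliberately falls through so
// the lower-numbered cases reuse the values already bound.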
815
816 tier1 inst(INTERPRETER_EXIT, (retval --)) {
817 assert(frame == &entry_frame);
818 assert(_PyFrame_IsIncomplete(frame));
819 /* Restore previous frame and return. */
820 tstate->current_frame = frame->previous;
821 assert(!_PyErr_Occurred(tstate));
822 tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS;
823 return retval;
824 }
825
826 // The stack effect here is ambiguous.
827 // We definitely pop the return value off the stack on entry.
828 // We also push it onto the stack on exit, but that's a
829 // different frame, and it's accounted for by _PUSH_FRAME.
830 op(_POP_FRAME, (retval --)) {
831 #if TIER_ONE
832 assert(frame != &entry_frame);
833 #endif
834 SYNC_SP();
835 _PyFrame_SetStackPointer(frame, stack_pointer);
836 assert(EMPTY());
837 _Py_LeaveRecursiveCallPy(tstate);
838 // GH-99729: We need to unlink the frame *before* clearing it:
839 _PyInterpreterFrame *dying = frame;
840 frame = tstate->current_frame = dying->previous;
841 _PyEval_FrameClearAndPop(tstate, dying);
842 _PyFrame_StackPush(frame, retval);
843 LOAD_SP();
844 LOAD_IP(frame->return_offset);
845 LLTRACE_RESUME_FRAME();
846 }
847
848 macro(RETURN_VALUE) =
849 _POP_FRAME;
850
851 inst(INSTRUMENTED_RETURN_VALUE, (retval --)) {
852 int err = _Py_call_instrumentation_arg(
853 tstate, PY_MONITORING_EVENT_PY_RETURN,
854 frame, this_instr, retval);
855 if (err) ERROR_NO_POP();
856 STACK_SHRINK(1);
857 assert(EMPTY());
858 _PyFrame_SetStackPointer(frame, stack_pointer);
859 _Py_LeaveRecursiveCallPy(tstate);
860 assert(frame != &entry_frame);
861 // GH-99729: We need to unlink the frame *before* clearing it:
862 _PyInterpreterFrame *dying = frame;
863 frame = tstate->current_frame = dying->previous;
864 _PyEval_FrameClearAndPop(tstate, dying);
865 _PyFrame_StackPush(frame, retval);
866 LOAD_IP(frame->return_offset);
867 goto resume_frame;
868 }
869
870 macro(RETURN_CONST) =
871 LOAD_CONST +
872 _POP_FRAME;
873
874 inst(INSTRUMENTED_RETURN_CONST, (--)) {
875 PyObject *retval = GETITEM(FRAME_CO_CONSTS, oparg);
876 int err = _Py_call_instrumentation_arg(
877 tstate, PY_MONITORING_EVENT_PY_RETURN,
878 frame, this_instr, retval);
879 if (err) ERROR_NO_POP();
880 Py_INCREF(retval);
881 assert(EMPTY());
882 _PyFrame_SetStackPointer(frame, stack_pointer);
883 _Py_LeaveRecursiveCallPy(tstate);
884 assert(frame != &entry_frame);
885 // GH-99729: We need to unlink the frame *before* clearing it:
886 _PyInterpreterFrame *dying = frame;
887 frame = tstate->current_frame = dying->previous;
888 _PyEval_FrameClearAndPop(tstate, dying);
889 _PyFrame_StackPush(frame, retval);
890 LOAD_IP(frame->return_offset);
891 goto resume_frame;
892 }
893
894 inst(GET_AITER, (obj -- iter)) {
895 unaryfunc getter = NULL;
896 PyTypeObject *type = Py_TYPE(obj);
897
898 if (type->tp_as_async != NULL) {
899 getter = type->tp_as_async->am_aiter;
900 }
901
902 if (getter == NULL) {
903 _PyErr_Format(tstate, PyExc_TypeError,
904 "'async for' requires an object with "
905 "__aiter__ method, got %.100s",
906 type->tp_name);
907 DECREF_INPUTS();
908 ERROR_IF(true, error);
909 }
910
911 iter = (*getter)(obj);
912 DECREF_INPUTS();
913 ERROR_IF(iter == NULL, error);
914
915 if (Py_TYPE(iter)->tp_as_async == NULL ||
916 Py_TYPE(iter)->tp_as_async->am_anext == NULL) {
917
918 _PyErr_Format(tstate, PyExc_TypeError,
919 "'async for' received an object from __aiter__ "
920 "that does not implement __anext__: %.100s",
921 Py_TYPE(iter)->tp_name);
922 Py_DECREF(iter);
923 ERROR_IF(true, error);
924 }
925 }
926
927 inst(GET_ANEXT, (aiter -- aiter, awaitable)) {
928 unaryfunc getter = NULL;
929 PyObject *next_iter = NULL;
930 PyTypeObject *type = Py_TYPE(aiter);
931
932 if (PyAsyncGen_CheckExact(aiter)) {
933 awaitable = type->tp_as_async->am_anext(aiter);
934 if (awaitable == NULL) {
935 ERROR_NO_POP();
936 }
937 } else {
938 if (type->tp_as_async != NULL){
939 getter = type->tp_as_async->am_anext;
940 }
941
942 if (getter != NULL) {
943 next_iter = (*getter)(aiter);
944 if (next_iter == NULL) {
945 ERROR_NO_POP();
946 }
947 }
948 else {
949 _PyErr_Format(tstate, PyExc_TypeError,
950 "'async for' requires an iterator with "
951 "__anext__ method, got %.100s",
952 type->tp_name);
953 ERROR_NO_POP();
954 }
955
956 awaitable = _PyCoro_GetAwaitableIter(next_iter);
957 if (awaitable == NULL) {
958 _PyErr_FormatFromCause(
959 PyExc_TypeError,
960 "'async for' received an invalid object "
961 "from __anext__: %.100s",
962 Py_TYPE(next_iter)->tp_name);
963
964 Py_DECREF(next_iter);
965 ERROR_NO_POP();
966 } else {
967 Py_DECREF(next_iter);
968 }
969 }
970 }
971
972 inst(GET_AWAITABLE, (iterable -- iter)) {
973 iter = _PyCoro_GetAwaitableIter(iterable);
974
975 if (iter == NULL) {
976 _PyEval_FormatAwaitableError(tstate, Py_TYPE(iterable), oparg);
977 }
978
979 DECREF_INPUTS();
980
981 if (iter != NULL && PyCoro_CheckExact(iter)) {
982 PyObject *yf = _PyGen_yf((PyGenObject*)iter);
983 if (yf != NULL) {
984 /* `iter` is a coroutine object that is being
985 awaited, `yf` is a pointer to the current awaitable
986 being awaited on. */
987 Py_DECREF(yf);
988 Py_CLEAR(iter);
989 _PyErr_SetString(tstate, PyExc_RuntimeError,
990 "coroutine is being awaited already");
991 /* The code below jumps to `error` if `iter` is NULL. */
992 }
993 }
994
995 ERROR_IF(iter == NULL, error);
996 }
997
998 family(SEND, INLINE_CACHE_ENTRIES_SEND) = {
999 SEND_GEN,
1000 };
1001
1002 specializing op(_SPECIALIZE_SEND, (counter/1, receiver, unused -- receiver, unused)) {
1003 #if ENABLE_SPECIALIZATION
1004 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
1005 next_instr = this_instr;
1006 _Py_Specialize_Send(receiver, next_instr);
1007 DISPATCH_SAME_OPARG();
1008 }
1009 STAT_INC(SEND, deferred);
1010 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
1011 #endif /* ENABLE_SPECIALIZATION */
1012 }
1013
1014 op(_SEND, (receiver, v -- receiver, retval)) {
1015 assert(frame != &entry_frame);
1016 if ((tstate->interp->eval_frame == NULL) &&
1017 (Py_TYPE(receiver) == &PyGen_Type || Py_TYPE(receiver) == &PyCoro_Type) &&
1018 ((PyGenObject *)receiver)->gi_frame_state < FRAME_EXECUTING)
1019 {
1020 PyGenObject *gen = (PyGenObject *)receiver;
1021 _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe;
1022 STACK_SHRINK(1);
1023 _PyFrame_StackPush(gen_frame, v);
1024 gen->gi_frame_state = FRAME_EXECUTING;
1025 gen->gi_exc_state.previous_item = tstate->exc_info;
1026 tstate->exc_info = &gen->gi_exc_state;
1027 assert(next_instr - this_instr + oparg <= UINT16_MAX);
1028 frame->return_offset = (uint16_t)(next_instr - this_instr + oparg);
1029 DISPATCH_INLINED(gen_frame);
1030 }
1031 if (Py_IsNone(v) && PyIter_Check(receiver)) {
1032 retval = Py_TYPE(receiver)->tp_iternext(receiver);
1033 }
1034 else {
1035 retval = PyObject_CallMethodOneArg(receiver, &_Py_ID(send), v);
1036 }
1037 if (retval == NULL) {
1038 if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)
1039 ) {
1040 _PyEval_MonitorRaise(tstate, frame, this_instr);
1041 }
1042 if (_PyGen_FetchStopIterationValue(&retval) == 0) {
1043 assert(retval != NULL);
1044 JUMPBY(oparg);
1045 }
1046 else {
1047 ERROR_NO_POP();
1048 }
1049 }
1050 Py_DECREF(v);
1051 }
1052
1053 macro(SEND) = _SPECIALIZE_SEND + _SEND;
1054
1055 inst(SEND_GEN, (unused/1, receiver, v -- receiver, unused)) {
1056 DEOPT_IF(tstate->interp->eval_frame);
1057 PyGenObject *gen = (PyGenObject *)receiver;
1058 DEOPT_IF(Py_TYPE(gen) != &PyGen_Type && Py_TYPE(gen) != &PyCoro_Type);
1059 DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING);
1060 STAT_INC(SEND, hit);
1061 _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe;
1062 STACK_SHRINK(1);
1063 _PyFrame_StackPush(gen_frame, v);
1064 gen->gi_frame_state = FRAME_EXECUTING;
1065 gen->gi_exc_state.previous_item = tstate->exc_info;
1066 tstate->exc_info = &gen->gi_exc_state;
1067 assert(next_instr - this_instr + oparg <= UINT16_MAX);
1068 frame->return_offset = (uint16_t)(next_instr - this_instr + oparg);
1069 DISPATCH_INLINED(gen_frame);
1070 }
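// SEND drives the `yield from` and `await` protocols; oparg is the relative
// jump taken once the sub-iterator is exhausted. When the receiver is a
// suspended Python generator or coroutine, both _SEND and SEND_GEN push its
// frame directly (DISPATCH_INLINED) rather than going through send(), so
// the value is delivered without a C-level call.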
1071
1072 inst(INSTRUMENTED_YIELD_VALUE, (retval -- unused)) {
1073 assert(frame != &entry_frame);
1074 frame->instr_ptr = next_instr;
1075 PyGenObject *gen = _PyFrame_GetGenerator(frame);
1076 assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1);
1077 assert(oparg == 0 || oparg == 1);
1078 gen->gi_frame_state = FRAME_SUSPENDED + oparg;
1079 _PyFrame_SetStackPointer(frame, stack_pointer - 1);
1080 int err = _Py_call_instrumentation_arg(
1081 tstate, PY_MONITORING_EVENT_PY_YIELD,
1082 frame, this_instr, retval);
1083 if (err) ERROR_NO_POP();
1084 tstate->exc_info = gen->gi_exc_state.previous_item;
1085 gen->gi_exc_state.previous_item = NULL;
1086 _Py_LeaveRecursiveCallPy(tstate);
1087 _PyInterpreterFrame *gen_frame = frame;
1088 frame = tstate->current_frame = frame->previous;
1089 gen_frame->previous = NULL;
1090 _PyFrame_StackPush(frame, retval);
1091 /* We don't know which of these is relevant here, so keep them equal */
1092 assert(INLINE_CACHE_ENTRIES_SEND == INLINE_CACHE_ENTRIES_FOR_ITER);
1093 LOAD_IP(1 + INLINE_CACHE_ENTRIES_SEND);
1094 goto resume_frame;
1095 }
1096
1097 inst(YIELD_VALUE, (retval -- value)) {
1098 // NOTE: It's important that YIELD_VALUE never raises an exception!
1099 // The compiler treats any exception raised here as a failed close()
1100 // or throw() call.
1101 #if TIER_ONE
1102 assert(frame != &entry_frame);
1103 #endif
1104 frame->instr_ptr++;
1105 PyGenObject *gen = _PyFrame_GetGenerator(frame);
1106 assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1);
1107 assert(oparg == 0 || oparg == 1);
1108 gen->gi_frame_state = FRAME_SUSPENDED + oparg;
1109 SYNC_SP();
1110 _PyFrame_SetStackPointer(frame, stack_pointer);
1111 tstate->exc_info = gen->gi_exc_state.previous_item;
1112 gen->gi_exc_state.previous_item = NULL;
1113 _Py_LeaveRecursiveCallPy(tstate);
1114 _PyInterpreterFrame *gen_frame = frame;
1115 frame = tstate->current_frame = frame->previous;
1116 gen_frame->previous = NULL;
1117 /* We don't know which of these is relevant here, so keep them equal */
1118 assert(INLINE_CACHE_ENTRIES_SEND == INLINE_CACHE_ENTRIES_FOR_ITER);
1119 #if TIER_ONE
1120 assert(frame->instr_ptr->op.code == INSTRUMENTED_LINE ||
1121 frame->instr_ptr->op.code == INSTRUMENTED_INSTRUCTION ||
1122 _PyOpcode_Deopt[frame->instr_ptr->op.code] == SEND ||
1123 _PyOpcode_Deopt[frame->instr_ptr->op.code] == FOR_ITER ||
1124 _PyOpcode_Deopt[frame->instr_ptr->op.code] == INTERPRETER_EXIT ||
1125 _PyOpcode_Deopt[frame->instr_ptr->op.code] == ENTER_EXECUTOR);
1126 #endif
1127 LOAD_IP(1 + INLINE_CACHE_ENTRIES_SEND);
1128 LOAD_SP();
1129 value = retval;
1130 LLTRACE_RESUME_FRAME();
1131 }
1132
1133 inst(POP_EXCEPT, (exc_value -- )) {
1134 _PyErr_StackItem *exc_info = tstate->exc_info;
1135 Py_XSETREF(exc_info->exc_value, exc_value == Py_None ? NULL : exc_value);
1136 }
1137
1138 tier1 inst(RERAISE, (values[oparg], exc -- values[oparg])) {
1139 assert(oparg >= 0 && oparg <= 2);
1140 if (oparg) {
1141 PyObject *lasti = values[0];
1142 if (PyLong_Check(lasti)) {
1143 frame->instr_ptr = _PyCode_CODE(_PyFrame_GetCode(frame)) + PyLong_AsLong(lasti);
1144 assert(!_PyErr_Occurred(tstate));
1145 }
1146 else {
1147 assert(PyLong_Check(lasti));
1148 _PyErr_SetString(tstate, PyExc_SystemError, "lasti is not an int");
1149 ERROR_NO_POP();
1150 }
1151 }
1152 assert(exc && PyExceptionInstance_Check(exc));
1153 Py_INCREF(exc);
1154 _PyErr_SetRaisedException(tstate, exc);
1155 monitor_reraise(tstate, frame, this_instr);
1156 goto exception_unwind;
1157 }
1158
1159 tier1 inst(END_ASYNC_FOR, (awaitable, exc -- )) {
1160 assert(exc && PyExceptionInstance_Check(exc));
1161 if (PyErr_GivenExceptionMatches(exc, PyExc_StopAsyncIteration)) {
1162 DECREF_INPUTS();
1163 }
1164 else {
1165 Py_INCREF(exc);
1166 _PyErr_SetRaisedException(tstate, exc);
1167 monitor_reraise(tstate, frame, this_instr);
1168 goto exception_unwind;
1169 }
1170 }
1171
1172 tier1 inst(CLEANUP_THROW, (sub_iter, last_sent_val, exc_value -- none, value)) {
1173 assert(throwflag);
1174 assert(exc_value && PyExceptionInstance_Check(exc_value));
1175 if (PyErr_GivenExceptionMatches(exc_value, PyExc_StopIteration)) {
1176 value = Py_NewRef(((PyStopIterationObject *)exc_value)->value);
1177 DECREF_INPUTS();
1178 none = Py_None;
1179 }
1180 else {
1181 _PyErr_SetRaisedException(tstate, Py_NewRef(exc_value));
1182 monitor_reraise(tstate, frame, this_instr);
1183 goto exception_unwind;
1184 }
1185 }
1186
1187 inst(LOAD_ASSERTION_ERROR, ( -- value)) {
1188 value = Py_NewRef(PyExc_AssertionError);
1189 }
1190
1191 inst(LOAD_BUILD_CLASS, ( -- bc)) {
1192 ERROR_IF(PyMapping_GetOptionalItem(BUILTINS(), &_Py_ID(__build_class__), &bc) < 0, error);
1193 if (bc == NULL) {
1194 _PyErr_SetString(tstate, PyExc_NameError,
1195 "__build_class__ not found");
1196 ERROR_IF(true, error);
1197 }
1198 }
1199
1200 inst(STORE_NAME, (v -- )) {
1201 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
1202 PyObject *ns = LOCALS();
1203 int err;
1204 if (ns == NULL) {
1205 _PyErr_Format(tstate, PyExc_SystemError,
1206 "no locals found when storing %R", name);
1207 DECREF_INPUTS();
1208 ERROR_IF(true, error);
1209 }
1210 if (PyDict_CheckExact(ns))
1211 err = PyDict_SetItem(ns, name, v);
1212 else
1213 err = PyObject_SetItem(ns, name, v);
1214 DECREF_INPUTS();
1215 ERROR_IF(err, error);
1216 }
1217
1218 inst(DELETE_NAME, (--)) {
1219 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
1220 PyObject *ns = LOCALS();
1221 int err;
1222 if (ns == NULL) {
1223 _PyErr_Format(tstate, PyExc_SystemError,
1224 "no locals when deleting %R", name);
1225 ERROR_NO_POP();
1226 }
1227 err = PyObject_DelItem(ns, name);
1228 // Can't use ERROR_IF here.
1229 if (err != 0) {
1230 _PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
1231 NAME_ERROR_MSG,
1232 name);
1233 ERROR_NO_POP();
1234 }
1235 }
1236
1237 family(UNPACK_SEQUENCE, INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE) = {
1238 UNPACK_SEQUENCE_TWO_TUPLE,
1239 UNPACK_SEQUENCE_TUPLE,
1240 UNPACK_SEQUENCE_LIST,
1241 };
1242
1243 specializing op(_SPECIALIZE_UNPACK_SEQUENCE, (counter/1, seq -- seq)) {
1244 #if ENABLE_SPECIALIZATION
1245 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
1246 next_instr = this_instr;
1247 _Py_Specialize_UnpackSequence(seq, next_instr, oparg);
1248 DISPATCH_SAME_OPARG();
1249 }
1250 STAT_INC(UNPACK_SEQUENCE, deferred);
1251 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
1252 #endif /* ENABLE_SPECIALIZATION */
1253 (void)seq;
1254 (void)counter;
1255 }
1256
1257 op(_UNPACK_SEQUENCE, (seq -- unused[oparg])) {
1258 PyObject **top = stack_pointer + oparg - 1;
1259 int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top);
1260 DECREF_INPUTS();
1261 ERROR_IF(res == 0, error);
1262 }
1263
1264 macro(UNPACK_SEQUENCE) = _SPECIALIZE_UNPACK_SEQUENCE + _UNPACK_SEQUENCE;
1265
1266 inst(UNPACK_SEQUENCE_TWO_TUPLE, (unused/1, seq -- val1, val0)) {
1267 assert(oparg == 2);
1268 DEOPT_IF(!PyTuple_CheckExact(seq));
1269 DEOPT_IF(PyTuple_GET_SIZE(seq) != 2);
1270 STAT_INC(UNPACK_SEQUENCE, hit);
1271 val0 = Py_NewRef(PyTuple_GET_ITEM(seq, 0));
1272 val1 = Py_NewRef(PyTuple_GET_ITEM(seq, 1));
1273 DECREF_INPUTS();
1274 }
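// Note the push order: the second item goes on the stack first so the first
// item ends up on top, matching the order in which the following stores pop
// the targets of, e.g., `a, b = t`.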
1275
1276 inst(UNPACK_SEQUENCE_TUPLE, (unused/1, seq -- values[oparg])) {
1277 DEOPT_IF(!PyTuple_CheckExact(seq));
1278 DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg);
1279 STAT_INC(UNPACK_SEQUENCE, hit);
1280 PyObject **items = _PyTuple_ITEMS(seq);
1281 for (int i = oparg; --i >= 0; ) {
1282 *values++ = Py_NewRef(items[i]);
1283 }
1284 DECREF_INPUTS();
1285 }
1286
1287 inst(UNPACK_SEQUENCE_LIST, (unused/1, seq -- values[oparg])) {
1288 DEOPT_IF(!PyList_CheckExact(seq));
1289 DEOPT_IF(PyList_GET_SIZE(seq) != oparg);
1290 STAT_INC(UNPACK_SEQUENCE, hit);
1291 PyObject **items = _PyList_ITEMS(seq);
1292 for (int i = oparg; --i >= 0; ) {
1293 *values++ = Py_NewRef(items[i]);
1294 }
1295 DECREF_INPUTS();
1296 }
1297
1298 inst(UNPACK_EX, (seq -- unused[oparg & 0xFF], unused, unused[oparg >> 8])) {
1299 int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8);
1300 PyObject **top = stack_pointer + totalargs - 1;
1301 int res = _PyEval_UnpackIterable(tstate, seq, oparg & 0xFF, oparg >> 8, top);
1302 DECREF_INPUTS();
1303 ERROR_IF(res == 0, error);
1304 }
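// The UNPACK_EX oparg is split into two bytes: the low byte is the number
// of targets before the starred name, the high byte the number after it.
// For example, `a, *rest, z = seq` unpacks with one value before and one
// after the list bound to `rest` (oparg == 0x0101).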
1305
1306 family(STORE_ATTR, INLINE_CACHE_ENTRIES_STORE_ATTR) = {
1307 STORE_ATTR_INSTANCE_VALUE,
1308 STORE_ATTR_SLOT,
1309 STORE_ATTR_WITH_HINT,
1310 };
1311
1312 specializing op(_SPECIALIZE_STORE_ATTR, (counter/1, owner -- owner)) {
1313 #if ENABLE_SPECIALIZATION
1314 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
1315 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
1316 next_instr = this_instr;
1317 _Py_Specialize_StoreAttr(owner, next_instr, name);
1318 DISPATCH_SAME_OPARG();
1319 }
1320 STAT_INC(STORE_ATTR, deferred);
1321 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
1322 #endif /* ENABLE_SPECIALIZATION */
1323 }
1324
1325 op(_STORE_ATTR, (v, owner --)) {
1326 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
1327 int err = PyObject_SetAttr(owner, name, v);
1328 DECREF_INPUTS();
1329 ERROR_IF(err, error);
1330 }
1331
1332 macro(STORE_ATTR) = _SPECIALIZE_STORE_ATTR + unused/3 + _STORE_ATTR;
1333
1334 inst(DELETE_ATTR, (owner --)) {
1335 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
1336 int err = PyObject_DelAttr(owner, name);
1337 DECREF_INPUTS();
1338 ERROR_IF(err, error);
1339 }
1340
1341 inst(STORE_GLOBAL, (v --)) {
1342 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
1343 int err = PyDict_SetItem(GLOBALS(), name, v);
1344 DECREF_INPUTS();
1345 ERROR_IF(err, error);
1346 }
1347
1348 inst(DELETE_GLOBAL, (--)) {
1349 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
1350 int err = PyDict_Pop(GLOBALS(), name, NULL);
1351 // Can't use ERROR_IF here.
1352 if (err < 0) {
1353 ERROR_NO_POP();
1354 }
1355 if (err == 0) {
1356 _PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
1357 NAME_ERROR_MSG, name);
1358 ERROR_NO_POP();
1359 }
1360 }
1361
1362 inst(LOAD_LOCALS, ( -- locals)) {
1363 locals = LOCALS();
1364 if (locals == NULL) {
1365 _PyErr_SetString(tstate, PyExc_SystemError,
1366 "no locals found");
1367 ERROR_IF(true, error);
1368 }
1369 Py_INCREF(locals);
1370 }
1371
1372 inst(LOAD_FROM_DICT_OR_GLOBALS, (mod_or_class_dict -- v)) {
1373 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
1374 if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) {
1375 ERROR_NO_POP();
1376 }
1377 if (v == NULL) {
1378 if (PyDict_CheckExact(GLOBALS())
1379 && PyDict_CheckExact(BUILTINS()))
1380 {
1381 v = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(),
1382 (PyDictObject *)BUILTINS(),
1383 name);
1384 if (v == NULL) {
1385 if (!_PyErr_Occurred(tstate)) {
1386 /* _PyDict_LoadGlobal() returns NULL without raising
1387 * an exception if the key doesn't exist */
1388 _PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
1389 NAME_ERROR_MSG, name);
1390 }
1391 ERROR_NO_POP();
1392 }
1393 }
1394 else {
1395 /* Slow-path if globals or builtins is not a dict */
1396 /* namespace 1: globals */
1397 ERROR_IF(PyMapping_GetOptionalItem(GLOBALS(), name, &v) < 0, error);
1398 if (v == NULL) {
1399 /* namespace 2: builtins */
1400 ERROR_IF(PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0, error);
1401 if (v == NULL) {
1402 _PyEval_FormatExcCheckArg(
1403 tstate, PyExc_NameError,
1404 NAME_ERROR_MSG, name);
1405 ERROR_IF(true, error);
1406 }
1407 }
1408 }
1409 }
1410 DECREF_INPUTS();
1411 }
1412
1413 inst(LOAD_NAME, (-- v)) {
1414 PyObject *mod_or_class_dict = LOCALS();
1415 if (mod_or_class_dict == NULL) {
1416 _PyErr_SetString(tstate, PyExc_SystemError,
1417 "no locals found");
1418 ERROR_IF(true, error);
1419 }
1420 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
1421 if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) {
1422 ERROR_NO_POP();
1423 }
1424 if (v == NULL) {
1425 if (PyDict_GetItemRef(GLOBALS(), name, &v) < 0) {
1426 ERROR_NO_POP();
1427 }
1428 if (v == NULL) {
1429 if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) {
1430 ERROR_NO_POP();
1431 }
1432 if (v == NULL) {
1433 _PyEval_FormatExcCheckArg(
1434 tstate, PyExc_NameError,
1435 NAME_ERROR_MSG, name);
1436 ERROR_NO_POP();
1437 }
1438 }
1439 }
1440 }
1441
1442 family(LOAD_GLOBAL, INLINE_CACHE_ENTRIES_LOAD_GLOBAL) = {
1443 LOAD_GLOBAL_MODULE,
1444 LOAD_GLOBAL_BUILTIN,
1445 };
1446
1447 specializing op(_SPECIALIZE_LOAD_GLOBAL, (counter/1 -- )) {
1448 #if ENABLE_SPECIALIZATION
1449 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
1450 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
1451 next_instr = this_instr;
1452 _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name);
1453 DISPATCH_SAME_OPARG();
1454 }
1455 STAT_INC(LOAD_GLOBAL, deferred);
1456 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
1457 #endif /* ENABLE_SPECIALIZATION */
1458 }
1459
1460 op(_LOAD_GLOBAL, ( -- res, null if (oparg & 1))) {
1461 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
1462 if (PyDict_CheckExact(GLOBALS())
1463 && PyDict_CheckExact(BUILTINS()))
1464 {
1465 res = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(),
1466 (PyDictObject *)BUILTINS(),
1467 name);
1468 if (res == NULL) {
1469 if (!_PyErr_Occurred(tstate)) {
1470 /* _PyDict_LoadGlobal() returns NULL without raising
1471 * an exception if the key doesn't exist */
1472 _PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
1473 NAME_ERROR_MSG, name);
1474 }
1475 ERROR_IF(true, error);
1476 }
1477 }
1478 else {
1479 /* Slow-path if globals or builtins is not a dict */
1480 /* namespace 1: globals */
1481 ERROR_IF(PyMapping_GetOptionalItem(GLOBALS(), name, &res) < 0, error);
1482 if (res == NULL) {
1483 /* namespace 2: builtins */
1484 ERROR_IF(PyMapping_GetOptionalItem(BUILTINS(), name, &res) < 0, error);
1485 if (res == NULL) {
1486 _PyEval_FormatExcCheckArg(
1487 tstate, PyExc_NameError,
1488 NAME_ERROR_MSG, name);
1489 ERROR_IF(true, error);
1490 }
1491 }
1492 }
1493 null = NULL;
1494 }
1495
1496 macro(LOAD_GLOBAL) =
1497 _SPECIALIZE_LOAD_GLOBAL +
1498 counter/1 +
1499 globals_version/1 +
1500 builtins_version/1 +
1501 _LOAD_GLOBAL;
1502
1503 op(_GUARD_GLOBALS_VERSION, (version/1 --)) {
1504 PyDictObject *dict = (PyDictObject *)GLOBALS();
1505 DEOPT_IF(!PyDict_CheckExact(dict));
1506 DEOPT_IF(dict->ma_keys->dk_version != version);
1507 assert(DK_IS_UNICODE(dict->ma_keys));
1508 }
1509
1510 op(_GUARD_BUILTINS_VERSION, (version/1 --)) {
1511 PyDictObject *dict = (PyDictObject *)BUILTINS();
1512 DEOPT_IF(!PyDict_CheckExact(dict));
1513 DEOPT_IF(dict->ma_keys->dk_version != version);
1514 assert(DK_IS_UNICODE(dict->ma_keys));
1515 }
1516
1517 op(_LOAD_GLOBAL_MODULE, (index/1 -- res, null if (oparg & 1))) {
1518 PyDictObject *dict = (PyDictObject *)GLOBALS();
1519 PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys);
1520 res = entries[index].me_value;
1521 DEOPT_IF(res == NULL);
1522 Py_INCREF(res);
1523 STAT_INC(LOAD_GLOBAL, hit);
1524 null = NULL;
1525 }
1526
1527 op(_LOAD_GLOBAL_BUILTINS, (index/1 -- res, null if (oparg & 1))) {
1528 PyDictObject *bdict = (PyDictObject *)BUILTINS();
1529 PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(bdict->ma_keys);
1530 res = entries[index].me_value;
1531 DEOPT_IF(res == NULL);
1532 Py_INCREF(res);
1533 STAT_INC(LOAD_GLOBAL, hit);
1534 null = NULL;
1535 }
1536
1537 macro(LOAD_GLOBAL_MODULE) =
1538 unused/1 + // Skip over the counter
1539 _GUARD_GLOBALS_VERSION +
1540 unused/1 + // Skip over the builtins version
1541 _LOAD_GLOBAL_MODULE;
1542
1543 macro(LOAD_GLOBAL_BUILTIN) =
1544 unused/1 + // Skip over the counter
1545 _GUARD_GLOBALS_VERSION +
1546 _GUARD_BUILTINS_VERSION +
1547 _LOAD_GLOBAL_BUILTINS;
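// The LOAD_GLOBAL cache holds four 16-bit entries: counter, globals keys
// version, builtins keys version, and index. LOAD_GLOBAL_MODULE guards the
// globals version and loads the value at the cached index in the globals
// dict; LOAD_GLOBAL_BUILTIN guards both versions, since a key newly added
// to globals could shadow the cached builtin. In both cases bit 0 of oparg
// requests an extra NULL push (for the call protocol) and oparg >> 1 is the
// name index.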
1548
1549 inst(DELETE_FAST, (--)) {
1550 PyObject *v = GETLOCAL(oparg);
1551 if (v == NULL) {
1552 _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError,
1553 UNBOUNDLOCAL_ERROR_MSG,
1554 PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg)
1555 );
1556 ERROR_IF(1, error);
1557 }
1558 SETLOCAL(oparg, NULL);
1559 }
1560
1561 inst(MAKE_CELL, (--)) {
1562 // "initial" is probably NULL but not if it's an arg (or set
1563 // via the f_locals proxy before MAKE_CELL has run).
1564 PyObject *initial = GETLOCAL(oparg);
1565 PyObject *cell = PyCell_New(initial);
1566 if (cell == NULL) {
1567 ERROR_NO_POP();
1568 }
1569 SETLOCAL(oparg, cell);
1570 }
1571
1572 inst(DELETE_DEREF, (--)) {
1573 PyObject *cell = GETLOCAL(oparg);
1574 // Can't use ERROR_IF here.
1575 // Fortunately we don't need its superpower.
1576 PyObject *oldobj = PyCell_SwapTakeRef((PyCellObject *)cell, NULL);
1577 if (oldobj == NULL) {
1578 _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg);
1579 ERROR_NO_POP();
1580 }
1581 Py_DECREF(oldobj);
1582 }
1583
1584 inst(LOAD_FROM_DICT_OR_DEREF, (class_dict -- value)) {
1585 PyObject *name;
1586 assert(class_dict);
1587 assert(oparg >= 0 && oparg < _PyFrame_GetCode(frame)->co_nlocalsplus);
1588 name = PyTuple_GET_ITEM(_PyFrame_GetCode(frame)->co_localsplusnames, oparg);
1589 if (PyMapping_GetOptionalItem(class_dict, name, &value) < 0) {
1590 ERROR_NO_POP();
1591 }
1592 if (!value) {
1593 PyCellObject *cell = (PyCellObject *)GETLOCAL(oparg);
1594 value = PyCell_GetRef(cell);
1595 if (value == NULL) {
1596 _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg);
1597 ERROR_NO_POP();
1598 }
1599 }
1600 Py_DECREF(class_dict);
1601 }
1602
1603 inst(LOAD_DEREF, ( -- value)) {
1604 PyCellObject *cell = (PyCellObject *)GETLOCAL(oparg);
1605 value = PyCell_GetRef(cell);
1606 if (value == NULL) {
1607 _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg);
1608 ERROR_IF(true, error);
1609 }
1610 }
1611
1612 inst(STORE_DEREF, (v --)) {
1613 PyCellObject *cell = (PyCellObject *)GETLOCAL(oparg);
1614 PyCell_SetTakeRef(cell, v);
1615 }
1616
1617 inst(COPY_FREE_VARS, (--)) {
1618 /* Copy closure variables to free variables */
1619 PyCodeObject *co = _PyFrame_GetCode(frame);
1620 assert(PyFunction_Check(frame->f_funcobj));
1621 PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure;
1622 assert(oparg == co->co_nfreevars);
1623 int offset = co->co_nlocalsplus - oparg;
1624 for (int i = 0; i < oparg; ++i) {
1625 PyObject *o = PyTuple_GET_ITEM(closure, i);
1626 frame->localsplus[offset + i] = Py_NewRef(o);
1627 }
1628 }
1629
1630 inst(BUILD_STRING, (pieces[oparg] -- str)) {
1631 str = _PyUnicode_JoinArray(&_Py_STR(empty), pieces, oparg);
1632 DECREF_INPUTS();
1633 ERROR_IF(str == NULL, error);
1634 }
1635
1636 inst(BUILD_TUPLE, (values[oparg] -- tup)) {
1637 tup = _PyTuple_FromArraySteal(values, oparg);
1638 ERROR_IF(tup == NULL, error);
1639 }
1640
1641 inst(BUILD_LIST, (values[oparg] -- list)) {
1642 list = _PyList_FromArraySteal(values, oparg);
1643 ERROR_IF(list == NULL, error);
1644 }
1645
1646 inst(LIST_EXTEND, (list, unused[oparg-1], iterable -- list, unused[oparg-1])) {
1647 PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable);
1648 if (none_val == NULL) {
1649 if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) &&
1650 (Py_TYPE(iterable)->tp_iter == NULL && !PySequence_Check(iterable)))
1651 {
1652 _PyErr_Clear(tstate);
1653 _PyErr_Format(tstate, PyExc_TypeError,
1654 "Value after * must be an iterable, not %.200s",
1655 Py_TYPE(iterable)->tp_name);
1656 }
1657 DECREF_INPUTS();
1658 ERROR_IF(true, error);
1659 }
1660 assert(Py_IsNone(none_val));
1661 DECREF_INPUTS();
1662 }
1663
1664 inst(SET_UPDATE, (set, unused[oparg-1], iterable -- set, unused[oparg-1])) {
1665 int err = _PySet_Update(set, iterable);
1666 DECREF_INPUTS();
1667 ERROR_IF(err < 0, error);
1668 }
1669
1670 inst(BUILD_SET, (values[oparg] -- set)) {
1671 set = PySet_New(NULL);
1672 if (set == NULL)
1673 ERROR_NO_POP();
1674 int err = 0;
1675 for (int i = 0; i < oparg; i++) {
1676 PyObject *item = values[i];
1677 if (err == 0)
1678 err = PySet_Add(set, item);
1679 Py_DECREF(item);
1680 }
1681 if (err != 0) {
1682 Py_DECREF(set);
1683 ERROR_IF(true, error);
1684 }
1685 }
1686
1687 inst(BUILD_MAP, (values[oparg*2] -- map)) {
1688 map = _PyDict_FromItems(
1689 values, 2,
1690 values+1, 2,
1691 oparg);
1692 DECREF_INPUTS();
1693 ERROR_IF(map == NULL, error);
1694 }
1695
1696 inst(SETUP_ANNOTATIONS, (--)) {
1697 int err;
1698 PyObject *ann_dict;
1699 if (LOCALS() == NULL) {
1700 _PyErr_Format(tstate, PyExc_SystemError,
1701 "no locals found when setting up annotations");
1702 ERROR_IF(true, error);
1703 }
1704 /* check if __annotations__ in locals()... */
1705 ERROR_IF(PyMapping_GetOptionalItem(LOCALS(), &_Py_ID(__annotations__), &ann_dict) < 0, error);
1706 if (ann_dict == NULL) {
1707 ann_dict = PyDict_New();
1708 ERROR_IF(ann_dict == NULL, error);
1709 err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__),
1710 ann_dict);
1711 Py_DECREF(ann_dict);
1712 ERROR_IF(err, error);
1713 }
1714 else {
1715 Py_DECREF(ann_dict);
1716 }
1717 }
1718
1719 inst(BUILD_CONST_KEY_MAP, (values[oparg], keys -- map)) {
1720 assert(PyTuple_CheckExact(keys));
1721 assert(PyTuple_GET_SIZE(keys) == (Py_ssize_t)oparg);
1722 map = _PyDict_FromItems(
1723 &PyTuple_GET_ITEM(keys, 0), 1,
1724 values, 1, oparg);
1725 DECREF_INPUTS();
1726 ERROR_IF(map == NULL, error);
1727 }
1728
1729 inst(DICT_UPDATE, (dict, unused[oparg - 1], update -- dict, unused[oparg - 1])) {
1730 if (PyDict_Update(dict, update) < 0) {
1731 if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) {
1732 _PyErr_Format(tstate, PyExc_TypeError,
1733 "'%.200s' object is not a mapping",
1734 Py_TYPE(update)->tp_name);
1735 }
1736 DECREF_INPUTS();
1737 ERROR_IF(true, error);
1738 }
1739 DECREF_INPUTS();
1740 }
1741
1742 inst(DICT_MERGE, (callable, unused, unused, dict, unused[oparg - 1], update -- callable, unused, unused, dict, unused[oparg - 1])) {
1743 if (_PyDict_MergeEx(dict, update, 2) < 0) {
1744 _PyEval_FormatKwargsError(tstate, callable, update);
1745 DECREF_INPUTS();
1746 ERROR_IF(true, error);
1747 }
1748 DECREF_INPUTS();
1749 }
1750
1751 inst(MAP_ADD, (dict, unused[oparg - 1], key, value -- dict, unused[oparg - 1])) {
1752 assert(PyDict_CheckExact(dict));
1753 /* dict[key] = value */
1754 // Do not DECREF INPUTS because the function steals the references
1755 ERROR_IF(_PyDict_SetItem_Take2((PyDictObject *)dict, key, value) != 0, error);
1756 }
1757
1758 inst(INSTRUMENTED_LOAD_SUPER_ATTR, (unused/1, unused, unused, unused -- unused, unused if (oparg & 1))) {
1759 // cancel out the decrement that will happen in LOAD_SUPER_ATTR; we
1760 // don't want to specialize instrumented instructions
1761 PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter);
1762 GO_TO_INSTRUCTION(LOAD_SUPER_ATTR);
1763 }
1764
1765 family(LOAD_SUPER_ATTR, INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR) = {
1766 LOAD_SUPER_ATTR_ATTR,
1767 LOAD_SUPER_ATTR_METHOD,
1768 };
1769
1770 specializing op(_SPECIALIZE_LOAD_SUPER_ATTR, (counter/1, global_super, class, unused -- global_super, class, unused)) {
1771 #if ENABLE_SPECIALIZATION
1772 int load_method = oparg & 1;
1773 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
1774 next_instr = this_instr;
1775 _Py_Specialize_LoadSuperAttr(global_super, class, next_instr, load_method);
1776 DISPATCH_SAME_OPARG();
1777 }
1778 STAT_INC(LOAD_SUPER_ATTR, deferred);
1779 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
1780 #endif /* ENABLE_SPECIALIZATION */
1781 }
1782
1783 tier1 op(_LOAD_SUPER_ATTR, (global_super, class, self -- attr, null if (oparg & 1))) {
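// oparg layout: bit 0 selects the method-load form (also push self), bit 1 is set
// for the two-argument super(class, self) call (and doubles as the number of
// arguments passed to super() below), and oparg >> 2 indexes the name in co_names.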
1784 if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
1785 PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
1786 int err = _Py_call_instrumentation_2args(
1787 tstate, PY_MONITORING_EVENT_CALL,
1788 frame, this_instr, global_super, arg);
1789 ERROR_IF(err, error);
1790 }
1791 // we make no attempt to optimize here; specializations should
1792 // handle any case whose performance we care about
1793 PyObject *stack[] = {class, self};
1794 PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL);
1795 if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
1796 PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
1797 if (super == NULL) {
1798 _Py_call_instrumentation_exc2(
1799 tstate, PY_MONITORING_EVENT_C_RAISE,
1800 frame, this_instr, global_super, arg);
1801 }
1802 else {
1803 int err = _Py_call_instrumentation_2args(
1804 tstate, PY_MONITORING_EVENT_C_RETURN,
1805 frame, this_instr, global_super, arg);
1806 if (err < 0) {
1807 Py_CLEAR(super);
1808 }
1809 }
1810 }
1811 DECREF_INPUTS();
1812 ERROR_IF(super == NULL, error);
1813 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
1814 attr = PyObject_GetAttr(super, name);
1815 Py_DECREF(super);
1816 ERROR_IF(attr == NULL, error);
1817 null = NULL;
1818 }
1819
1820 macro(LOAD_SUPER_ATTR) = _SPECIALIZE_LOAD_SUPER_ATTR + _LOAD_SUPER_ATTR;
1821
1822 pseudo(LOAD_SUPER_METHOD) = {
1823 LOAD_SUPER_ATTR,
1824 };
1825
1826 pseudo(LOAD_ZERO_SUPER_METHOD) = {
1827 LOAD_SUPER_ATTR,
1828 };
1829
1830 pseudo(LOAD_ZERO_SUPER_ATTR) = {
1831 LOAD_SUPER_ATTR,
1832 };
1833
1834 inst(LOAD_SUPER_ATTR_ATTR, (unused/1, global_super, class, self -- attr, unused if (0))) {
1835 assert(!(oparg & 1));
1836 DEOPT_IF(global_super != (PyObject *)&PySuper_Type);
1837 DEOPT_IF(!PyType_Check(class));
1838 STAT_INC(LOAD_SUPER_ATTR, hit);
1839 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
1840 attr = _PySuper_Lookup((PyTypeObject *)class, self, name, NULL);
1841 DECREF_INPUTS();
1842 ERROR_IF(attr == NULL, error);
1843 }
1844
1845 inst(LOAD_SUPER_ATTR_METHOD, (unused/1, global_super, class, self -- attr, self_or_null)) {
1846 assert(oparg & 1);
1847 DEOPT_IF(global_super != (PyObject *)&PySuper_Type);
1848 DEOPT_IF(!PyType_Check(class));
1849 STAT_INC(LOAD_SUPER_ATTR, hit);
1850 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
1851 PyTypeObject *cls = (PyTypeObject *)class;
1852 int method_found = 0;
1853 attr = _PySuper_Lookup(cls, self, name,
1854 Py_TYPE(self)->tp_getattro == PyObject_GenericGetAttr ? &method_found : NULL);
1855 Py_DECREF(global_super);
1856 Py_DECREF(class);
1857 if (attr == NULL) {
1858 Py_DECREF(self);
1859 ERROR_IF(true, error);
1860 }
1861 if (method_found) {
1862 self_or_null = self; // transfer ownership
1863 } else {
1864 Py_DECREF(self);
1865 self_or_null = NULL;
1866 }
1867 }
1868
1869 family(LOAD_ATTR, INLINE_CACHE_ENTRIES_LOAD_ATTR) = {
1870 LOAD_ATTR_INSTANCE_VALUE,
1871 LOAD_ATTR_MODULE,
1872 LOAD_ATTR_WITH_HINT,
1873 LOAD_ATTR_SLOT,
1874 LOAD_ATTR_CLASS,
1875 LOAD_ATTR_PROPERTY,
1876 LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN,
1877 LOAD_ATTR_METHOD_WITH_VALUES,
1878 LOAD_ATTR_METHOD_NO_DICT,
1879 LOAD_ATTR_METHOD_LAZY_DICT,
1880 LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES,
1881 LOAD_ATTR_NONDESCRIPTOR_NO_DICT,
1882 };
1883
1884 specializing op(_SPECIALIZE_LOAD_ATTR, (counter/1, owner -- owner)) {
1885 #if ENABLE_SPECIALIZATION
1886 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
1887 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
1888 next_instr = this_instr;
1889 _Py_Specialize_LoadAttr(owner, next_instr, name);
1890 DISPATCH_SAME_OPARG();
1891 }
1892 STAT_INC(LOAD_ATTR, deferred);
1893 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
1894 #endif /* ENABLE_SPECIALIZATION */
1895 }
1896
1897 op(_LOAD_ATTR, (owner -- attr, self_or_null if (oparg & 1))) {
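// oparg layout: the low bit requests the method-load form (push self_or_null as a
// second value for CALL); oparg >> 1 indexes the attribute name in co_names.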
1898 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
1899 if (oparg & 1) {
1900 /* Designed to work in tandem with CALL, pushes two values. */
1901 attr = NULL;
1902 if (_PyObject_GetMethod(owner, name, &attr)) {
1903 /* We can bypass the temporary bound method object;
1904 meth is the unbound method and owner is self.
1905 meth | self | arg1 | ... | argN
1906 */
1907 assert(attr != NULL); // No errors on this branch
1908 self_or_null = owner; // Transfer ownership
1909 }
1910 else {
1911 /* meth is not an unbound method (it is a regular attribute, or
1912 something returned by the descriptor protocol). Set
1913 the second element of the stack to NULL to signal
1914 CALL that this is not a method call.
1915 meth | NULL | arg1 | ... | argN
1916 */
1917 DECREF_INPUTS();
1918 ERROR_IF(attr == NULL, error);
1919 self_or_null = NULL;
1920 }
1921 }
1922 else {
1923 /* Classic, pushes one value. */
1924 attr = PyObject_GetAttr(owner, name);
1925 DECREF_INPUTS();
1926 ERROR_IF(attr == NULL, error);
1927 }
1928 }
1929
1930 macro(LOAD_ATTR) =
1931 _SPECIALIZE_LOAD_ATTR +
1932 unused/8 +
1933 _LOAD_ATTR;
1934
1935 pseudo(LOAD_METHOD) = {
1936 LOAD_ATTR,
1937 };
1938
1939 op(_GUARD_TYPE_VERSION, (type_version/2, owner -- owner)) {
1940 PyTypeObject *tp = Py_TYPE(owner);
1941 assert(type_version != 0);
1942 EXIT_IF(tp->tp_version_tag != type_version);
1943 }
1944
1945 op(_CHECK_MANAGED_OBJECT_HAS_VALUES, (owner -- owner)) {
1946 assert(Py_TYPE(owner)->tp_dictoffset < 0);
1947 assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
1948 DEOPT_IF(!_PyObject_InlineValues(owner)->valid);
1949 }
1950
1951 split op(_LOAD_ATTR_INSTANCE_VALUE, (index/1, owner -- attr, null if (oparg & 1))) {
1952 attr = _PyObject_InlineValues(owner)->values[index];
1953 DEOPT_IF(attr == NULL);
1954 STAT_INC(LOAD_ATTR, hit);
1955 Py_INCREF(attr);
1956 null = NULL;
1957 DECREF_INPUTS();
1958 }
1959
1960 macro(LOAD_ATTR_INSTANCE_VALUE) =
1961 unused/1 + // Skip over the counter
1962 _GUARD_TYPE_VERSION +
1963 _CHECK_MANAGED_OBJECT_HAS_VALUES +
1964 _LOAD_ATTR_INSTANCE_VALUE +
1965 unused/5; // Skip over rest of cache
1966
1967 op(_CHECK_ATTR_MODULE, (dict_version/2, owner -- owner)) {
1968 DEOPT_IF(!PyModule_CheckExact(owner));
1969 PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict;
1970 assert(dict != NULL);
1971 DEOPT_IF(dict->ma_keys->dk_version != dict_version);
1972 }
1973
1974 op(_LOAD_ATTR_MODULE, (index/1, owner -- attr, null if (oparg & 1))) {
1975 PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict;
1976 assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE);
1977 assert(index < dict->ma_keys->dk_nentries);
1978 PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + index;
1979 attr = ep->me_value;
1980 DEOPT_IF(attr == NULL);
1981 STAT_INC(LOAD_ATTR, hit);
1982 Py_INCREF(attr);
1983 null = NULL;
1984 DECREF_INPUTS();
1985 }
1986
1987 macro(LOAD_ATTR_MODULE) =
1988 unused/1 +
1989 _CHECK_ATTR_MODULE +
1990 _LOAD_ATTR_MODULE +
1991 unused/5;
1992
1993 op(_CHECK_ATTR_WITH_HINT, (owner -- owner)) {
1994 assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
1995 PyDictObject *dict = _PyObject_GetManagedDict(owner);
1996 DEOPT_IF(dict == NULL);
1997 assert(PyDict_CheckExact((PyObject *)dict));
1998 }
1999
2000 op(_LOAD_ATTR_WITH_HINT, (hint/1, owner -- attr, null if (oparg & 1))) {
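// The cached hint is the position in the dict's key table where the specializer
// last found the name; deopt if the entry has moved or no longer matches.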
2001 PyDictObject *dict = _PyObject_GetManagedDict(owner);
2002 DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries);
2003 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
2004 if (DK_IS_UNICODE(dict->ma_keys)) {
2005 PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
2006 DEOPT_IF(ep->me_key != name);
2007 attr = ep->me_value;
2008 }
2009 else {
2010 PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint;
2011 DEOPT_IF(ep->me_key != name);
2012 attr = ep->me_value;
2013 }
2014 DEOPT_IF(attr == NULL);
2015 STAT_INC(LOAD_ATTR, hit);
2016 Py_INCREF(attr);
2017 null = NULL;
2018 DECREF_INPUTS();
2019 }
2020
2021 macro(LOAD_ATTR_WITH_HINT) =
2022 unused/1 +
2023 _GUARD_TYPE_VERSION +
2024 _CHECK_ATTR_WITH_HINT +
2025 _LOAD_ATTR_WITH_HINT +
2026 unused/5;
2027
2028 split op(_LOAD_ATTR_SLOT, (index/1, owner -- attr, null if (oparg & 1))) {
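// The cached index is the byte offset of the slot within the object, so the
// attribute is read directly from memory without any dict lookup.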
2029 char *addr = (char *)owner + index;
2030 attr = *(PyObject **)addr;
2031 DEOPT_IF(attr == NULL);
2032 STAT_INC(LOAD_ATTR, hit);
2033 Py_INCREF(attr);
2034 null = NULL;
2035 DECREF_INPUTS();
2036 }
2037
2038 macro(LOAD_ATTR_SLOT) =
2039 unused/1 +
2040 _GUARD_TYPE_VERSION +
2041 _LOAD_ATTR_SLOT + // NOTE: This action may also deopt
2042 unused/5;
2043
2044 op(_CHECK_ATTR_CLASS, (type_version/2, owner -- owner)) {
2045 DEOPT_IF(!PyType_Check(owner));
2046 assert(type_version != 0);
2047 DEOPT_IF(((PyTypeObject *)owner)->tp_version_tag != type_version);
2048
2049 }
2050
2051 split op(_LOAD_ATTR_CLASS, (descr/4, owner -- attr, null if (oparg & 1))) {
2052 STAT_INC(LOAD_ATTR, hit);
2053 assert(descr != NULL);
2054 attr = Py_NewRef(descr);
2055 null = NULL;
2056 DECREF_INPUTS();
2057 }
2058
2059 macro(LOAD_ATTR_CLASS) =
2060 unused/1 +
2061 _CHECK_ATTR_CLASS +
2062 unused/2 +
2063 _LOAD_ATTR_CLASS;
2064
2065 inst(LOAD_ATTR_PROPERTY, (unused/1, type_version/2, func_version/2, fget/4, owner -- unused, unused if (0))) {
2066 assert((oparg & 1) == 0);
2067 DEOPT_IF(tstate->interp->eval_frame);
2068
2069 PyTypeObject *cls = Py_TYPE(owner);
2070 assert(type_version != 0);
2071 DEOPT_IF(cls->tp_version_tag != type_version);
2072 assert(Py_IS_TYPE(fget, &PyFunction_Type));
2073 PyFunctionObject *f = (PyFunctionObject *)fget;
2074 assert(func_version != 0);
2075 DEOPT_IF(f->func_version != func_version);
2076 PyCodeObject *code = (PyCodeObject *)f->func_code;
2077 assert(code->co_argcount == 1);
2078 DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize));
2079 STAT_INC(LOAD_ATTR, hit);
2080 Py_INCREF(fget);
2081 _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f, 1);
2082 // Manipulate stack directly because we exit with DISPATCH_INLINED().
2083 STACK_SHRINK(1);
2084 new_frame->localsplus[0] = owner;
2085 frame->return_offset = (uint16_t)(next_instr - this_instr);
2086 DISPATCH_INLINED(new_frame);
2087 }
2088
2089 inst(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, (unused/1, type_version/2, func_version/2, getattribute/4, owner -- unused, unused if (0))) {
2090 assert((oparg & 1) == 0);
2091 DEOPT_IF(tstate->interp->eval_frame);
2092 PyTypeObject *cls = Py_TYPE(owner);
2093 assert(type_version != 0);
2094 DEOPT_IF(cls->tp_version_tag != type_version);
2095 assert(Py_IS_TYPE(getattribute, &PyFunction_Type));
2096 PyFunctionObject *f = (PyFunctionObject *)getattribute;
2097 assert(func_version != 0);
2098 DEOPT_IF(f->func_version != func_version);
2099 PyCodeObject *code = (PyCodeObject *)f->func_code;
2100 assert(code->co_argcount == 2);
2101 DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize));
2102 STAT_INC(LOAD_ATTR, hit);
2103
2104 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
2105 Py_INCREF(f);
2106 _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f, 2);
2107 // Manipulate stack directly because we exit with DISPATCH_INLINED().
2108 STACK_SHRINK(1);
2109 new_frame->localsplus[0] = owner;
2110 new_frame->localsplus[1] = Py_NewRef(name);
2111 frame->return_offset = (uint16_t)(next_instr - this_instr);
2112 DISPATCH_INLINED(new_frame);
2113 }
2114
2115 op(_GUARD_DORV_NO_DICT, (owner -- owner)) {
2116 assert(Py_TYPE(owner)->tp_dictoffset < 0);
2117 assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
2118 DEOPT_IF(_PyObject_GetManagedDict(owner));
2119 DEOPT_IF(_PyObject_InlineValues(owner)->valid == 0);
2120 }
2121
2122 op(_STORE_ATTR_INSTANCE_VALUE, (index/1, value, owner --)) {
2123 STAT_INC(STORE_ATTR, hit);
2124 assert(_PyObject_GetManagedDict(owner) == NULL);
2125 PyDictValues *values = _PyObject_InlineValues(owner);
2126
2127 PyObject *old_value = values->values[index];
2128 values->values[index] = value;
2129 if (old_value == NULL) {
2130 _PyDictValues_AddToInsertionOrder(values, index);
2131 }
2132 else {
2133 Py_DECREF(old_value);
2134 }
2135
2136 Py_DECREF(owner);
2137 }
2138
2139 macro(STORE_ATTR_INSTANCE_VALUE) =
2140 unused/1 +
2141 _GUARD_TYPE_VERSION +
2142 _GUARD_DORV_NO_DICT +
2143 _STORE_ATTR_INSTANCE_VALUE;
2144
2145 inst(STORE_ATTR_WITH_HINT, (unused/1, type_version/2, hint/1, value, owner --)) {
2146 PyTypeObject *tp = Py_TYPE(owner);
2147 assert(type_version != 0);
2148 DEOPT_IF(tp->tp_version_tag != type_version);
2149 assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT);
2150 PyDictObject *dict = _PyObject_GetManagedDict(owner);
2151 DEOPT_IF(dict == NULL);
2152 assert(PyDict_CheckExact((PyObject *)dict));
2153 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
2154 DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries);
2155 PyObject *old_value;
2156 uint64_t new_version;
2157 if (DK_IS_UNICODE(dict->ma_keys)) {
2158 PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
2159 DEOPT_IF(ep->me_key != name);
2160 old_value = ep->me_value;
2161 DEOPT_IF(old_value == NULL);
2162 new_version = _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, value);
2163 ep->me_value = value;
2164 }
2165 else {
2166 PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint;
2167 DEOPT_IF(ep->me_key != name);
2168 old_value = ep->me_value;
2169 DEOPT_IF(old_value == NULL);
2170 new_version = _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, value);
2171 ep->me_value = value;
2172 }
2173 /* Ensure dict is GC tracked if it needs to be */
2174 if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) {
2175 _PyObject_GC_TRACK(dict);
2176 }
2177 dict->ma_version_tag = new_version; // PEP 509
2178 // old_value should be DECREFed only after the GC-tracking check above is done; otherwise it could cause a
2179 // segmentation fault when the dict holds the only strong reference to the value in ep->me_value.
2180 Py_DECREF(old_value);
2181 STAT_INC(STORE_ATTR, hit);
2182 Py_DECREF(owner);
2183 }
2184
2185 op(_STORE_ATTR_SLOT, (index/1, value, owner --)) {
2186 char *addr = (char *)owner + index;
2187 STAT_INC(STORE_ATTR, hit);
2188 PyObject *old_value = *(PyObject **)addr;
2189 *(PyObject **)addr = value;
2190 Py_XDECREF(old_value);
2191 Py_DECREF(owner);
2192 }
2193
2194 macro(STORE_ATTR_SLOT) =
2195 unused/1 +
2196 _GUARD_TYPE_VERSION +
2197 _STORE_ATTR_SLOT;
2198
2199 family(COMPARE_OP, INLINE_CACHE_ENTRIES_COMPARE_OP) = {
2200 COMPARE_OP_FLOAT,
2201 COMPARE_OP_INT,
2202 COMPARE_OP_STR,
2203 };
2204
2205 specializing op(_SPECIALIZE_COMPARE_OP, (counter/1, left, right -- left, right)) {
2206 #if ENABLE_SPECIALIZATION
2207 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
2208 next_instr = this_instr;
2209 _Py_Specialize_CompareOp(left, right, next_instr, oparg);
2210 DISPATCH_SAME_OPARG();
2211 }
2212 STAT_INC(COMPARE_OP, deferred);
2213 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
2214 #endif /* ENABLE_SPECIALIZATION */
2215 }
2216
2217 op(_COMPARE_OP, (left, right -- res)) {
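// oparg layout: oparg >> 5 is the rich-comparison op (Py_LT..Py_GE), bit 4
// (oparg & 16) requests coercion of the result to an exact bool, and the low
// four bits form an outcome mask used only by the specialized
// _COMPARE_OP_{FLOAT,INT,STR} variants below.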
2218 assert((oparg >> 5) <= Py_GE);
2219 res = PyObject_RichCompare(left, right, oparg >> 5);
2220 DECREF_INPUTS();
2221 ERROR_IF(res == NULL, error);
2222 if (oparg & 16) {
2223 int res_bool = PyObject_IsTrue(res);
2224 Py_DECREF(res);
2225 ERROR_IF(res_bool < 0, error);
2226 res = res_bool ? Py_True : Py_False;
2227 }
2228 }
2229
2230 macro(COMPARE_OP) = _SPECIALIZE_COMPARE_OP + _COMPARE_OP;
2231
2232 macro(COMPARE_OP_FLOAT) =
2233 _GUARD_BOTH_FLOAT + unused/1 + _COMPARE_OP_FLOAT;
2234
2235 macro(COMPARE_OP_INT) =
2236 _GUARD_BOTH_INT + unused/1 + _COMPARE_OP_INT;
2237
2238 macro(COMPARE_OP_STR) =
2239 _GUARD_BOTH_UNICODE + unused/1 + _COMPARE_OP_STR;
2240
2241 op(_COMPARE_OP_FLOAT, (left, right -- res)) {
2242 STAT_INC(COMPARE_OP, hit);
2243 double dleft = PyFloat_AS_DOUBLE(left);
2244 double dright = PyFloat_AS_DOUBLE(right);
2245 // 1 if NaN, 2 if <, 4 if >, 8 if ==; this matches low four bits of the oparg
2246 int sign_ish = COMPARISON_BIT(dleft, dright);
2247 _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc);
2248 _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc);
2249 res = (sign_ish & oparg) ? Py_True : Py_False;
2250 // It's always a bool, so we don't care about oparg & 16.
2251 }
2252
2253 // Similar to COMPARE_OP_FLOAT
2254 op(_COMPARE_OP_INT, (left, right -- res)) {
2255 DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left));
2256 DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)right));
2257 STAT_INC(COMPARE_OP, hit);
2258 assert(_PyLong_DigitCount((PyLongObject *)left) <= 1 &&
2259 _PyLong_DigitCount((PyLongObject *)right) <= 1);
2260 Py_ssize_t ileft = _PyLong_CompactValue((PyLongObject *)left);
2261 Py_ssize_t iright = _PyLong_CompactValue((PyLongObject *)right);
2262 // 2 if <, 4 if >, 8 if ==; this matches the low 4 bits of the oparg
2263 int sign_ish = COMPARISON_BIT(ileft, iright);
2264 _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free);
2265 _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
2266 res = (sign_ish & oparg) ? Py_True : Py_False;
2267 // It's always a bool, so we don't care about oparg & 16.
2268 }
2269
2270 // Similar to COMPARE_OP_FLOAT, but for ==, != only
2271 op(_COMPARE_OP_STR, (left, right -- res)) {
2272 STAT_INC(COMPARE_OP, hit);
2273 int eq = _PyUnicode_Equal(left, right);
2274 assert((oparg >> 5) == Py_EQ || (oparg >> 5) == Py_NE);
2275 _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc);
2276 _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc);
2277 assert(eq == 0 || eq == 1);
2278 assert((oparg & 0xf) == COMPARISON_NOT_EQUALS || (oparg & 0xf) == COMPARISON_EQUALS);
2279 assert(COMPARISON_NOT_EQUALS + 1 == COMPARISON_EQUALS);
2280 res = ((COMPARISON_NOT_EQUALS + eq) & oparg) ? Py_True : Py_False;
2281 // It's always a bool, so we don't care about oparg & 16.
2282 }
2283
2284 inst(IS_OP, (left, right -- b)) {
2285 int res = Py_Is(left, right) ^ oparg;
2286 DECREF_INPUTS();
2287 b = res ? Py_True : Py_False;
2288 }
2289
2290 family(CONTAINS_OP, INLINE_CACHE_ENTRIES_CONTAINS_OP) = {
2291 CONTAINS_OP_SET,
2292 CONTAINS_OP_DICT,
2293 };
2294
2295 op(_CONTAINS_OP, (left, right -- b)) {
2296 int res = PySequence_Contains(right, left);
2297 DECREF_INPUTS();
2298 ERROR_IF(res < 0, error);
2299 b = (res ^ oparg) ? Py_True : Py_False;
2300 }
2301
2302 specializing op(_SPECIALIZE_CONTAINS_OP, (counter/1, left, right -- left, right)) {
2303 #if ENABLE_SPECIALIZATION
2304 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
2305 next_instr = this_instr;
2306 _Py_Specialize_ContainsOp(right, next_instr);
2307 DISPATCH_SAME_OPARG();
2308 }
2309 STAT_INC(CONTAINS_OP, deferred);
2310 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
2311 #endif /* ENABLE_SPECIALIZATION */
2312 }
2313
2314 macro(CONTAINS_OP) = _SPECIALIZE_CONTAINS_OP + _CONTAINS_OP;
2315
2316 inst(CONTAINS_OP_SET, (unused/1, left, right -- b)) {
2317 DEOPT_IF(!(PySet_CheckExact(right) || PyFrozenSet_CheckExact(right)));
2318 STAT_INC(CONTAINS_OP, hit);
2319 // Note: both set and frozenset use the same seq_contains method!
2320 int res = _PySet_Contains((PySetObject *)right, left);
2321 DECREF_INPUTS();
2322 ERROR_IF(res < 0, error);
2323 b = (res ^ oparg) ? Py_True : Py_False;
2324 }
2325
2326 inst(CONTAINS_OP_DICT, (unused/1, left, right -- b)) {
2327 DEOPT_IF(!PyDict_CheckExact(right));
2328 STAT_INC(CONTAINS_OP, hit);
2329 int res = PyDict_Contains(right, left);
2330 DECREF_INPUTS();
2331 ERROR_IF(res < 0, error);
2332 b = (res ^ oparg) ? Py_True : Py_False;
2333 }
2334
2335 inst(CHECK_EG_MATCH, (exc_value, match_type -- rest, match)) {
2336 if (_PyEval_CheckExceptStarTypeValid(tstate, match_type) < 0) {
2337 DECREF_INPUTS();
2338 ERROR_IF(true, error);
2339 }
2340
2341 match = NULL;
2342 rest = NULL;
2343 int res = _PyEval_ExceptionGroupMatch(exc_value, match_type,
2344 &match, &rest);
2345 DECREF_INPUTS();
2346 ERROR_IF(res < 0, error);
2347
2348 assert((match == NULL) == (rest == NULL));
2349 ERROR_IF(match == NULL, error);
2350
2351 if (!Py_IsNone(match)) {
2352 PyErr_SetHandledException(match);
2353 }
2354 }
2355
2356 inst(CHECK_EXC_MATCH, (left, right -- left, b)) {
2357 assert(PyExceptionInstance_Check(left));
2358 if (_PyEval_CheckExceptTypeValid(tstate, right) < 0) {
2359 DECREF_INPUTS();
2360 ERROR_IF(true, error);
2361 }
2362
2363 int res = PyErr_GivenExceptionMatches(left, right);
2364 DECREF_INPUTS();
2365 b = res ? Py_True : Py_False;
2366 }
2367
2368 tier1 inst(IMPORT_NAME, (level, fromlist -- res)) {
2369 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
2370 res = import_name(tstate, frame, name, fromlist, level);
2371 DECREF_INPUTS();
2372 ERROR_IF(res == NULL, error);
2373 }
2374
2375 tier1 inst(IMPORT_FROM, (from -- from, res)) {
2376 PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
2377 res = import_from(tstate, from, name);
2378 ERROR_IF(res == NULL, error);
2379 }
2380
2381 tier1 inst(JUMP_FORWARD, (--)) {
2382 JUMPBY(oparg);
2383 }
2384
2385 tier1 inst(JUMP_BACKWARD, (unused/1 --)) {
2386 CHECK_EVAL_BREAKER();
2387 assert(oparg <= INSTR_OFFSET());
2388 JUMPBY(-oparg);
2389 #ifdef _Py_TIER2
2390 #if ENABLE_SPECIALIZATION
2391 _Py_BackoffCounter counter = this_instr[1].counter;
2392 if (backoff_counter_triggers(counter) && this_instr->op.code == JUMP_BACKWARD) {
2393 _Py_CODEUNIT *start = this_instr;
2394 /* Back up over EXTENDED_ARGs so optimizer sees the whole instruction */
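// Each EXTENDED_ARG prefix contributes 8 bits of oparg, so back up one
// instruction per extra byte of oparg.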
2395 while (oparg > 255) {
2396 oparg >>= 8;
2397 start--;
2398 }
2399 _PyExecutorObject *executor;
2400 int optimized = _PyOptimizer_Optimize(frame, start, stack_pointer, &executor);
2401 ERROR_IF(optimized < 0, error);
2402 if (optimized) {
2403 assert(tstate->previous_executor == NULL);
2404 tstate->previous_executor = Py_None;
2405 GOTO_TIER_TWO(executor);
2406 }
2407 else {
2408 this_instr[1].counter = restart_backoff_counter(counter);
2409 }
2410 }
2411 else {
2412 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
2413 }
2414 #endif /* ENABLE_SPECIALIZATION */
2415 #endif /* _Py_TIER2 */
2416 }
2417
2418 pseudo(JUMP) = {
2419 JUMP_FORWARD,
2420 JUMP_BACKWARD,
2421 };
2422
2423 pseudo(JUMP_NO_INTERRUPT) = {
2424 JUMP_FORWARD,
2425 JUMP_BACKWARD_NO_INTERRUPT,
2426 };
2427
2428 tier1 inst(ENTER_EXECUTOR, (--)) {
2429 #ifdef _Py_TIER2
2430 PyCodeObject *code = _PyFrame_GetCode(frame);
2431 _PyExecutorObject *executor = code->co_executors->executors[oparg & 255];
2432 assert(executor->vm_data.index == INSTR_OFFSET() - 1);
2433 assert(executor->vm_data.code == code);
2434 assert(executor->vm_data.valid);
2435 assert(tstate->previous_executor == NULL);
2436 /* If the eval breaker is set then stay in tier 1.
2437 * This avoids any potentially infinite loops
2438 * involving _RESUME_CHECK */
2439 if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) {
2440 opcode = executor->vm_data.opcode;
2441 oparg = (oparg & ~255) | executor->vm_data.oparg;
2442 next_instr = this_instr;
2443 if (_PyOpcode_Caches[_PyOpcode_Deopt[opcode]]) {
2444 PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter);
2445 }
2446 DISPATCH_GOTO();
2447 }
2448 tstate->previous_executor = Py_None;
2449 Py_INCREF(executor);
2450 GOTO_TIER_TWO(executor);
2451 #else
2452 Py_FatalError("ENTER_EXECUTOR is not supported in this build");
2453 #endif /* _Py_TIER2 */
2454 }
2455
2456 replaced op(_POP_JUMP_IF_FALSE, (cond -- )) {
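// The inline cache keeps a bit-history of recent branch outcomes (shifted in
// below), which the Tier 2 trace projection can consult to guess the likelier
// direction of this branch.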
2457 assert(PyBool_Check(cond));
2458 int flag = Py_IsFalse(cond);
2459 #if ENABLE_SPECIALIZATION
2460 this_instr[1].cache = (this_instr[1].cache << 1) | flag;
2461 #endif
2462 JUMPBY(oparg * flag);
2463 }
2464
2465 replaced op(_POP_JUMP_IF_TRUE, (cond -- )) {
2466 assert(PyBool_Check(cond));
2467 int flag = Py_IsTrue(cond);
2468 #if ENABLE_SPECIALIZATION
2469 this_instr[1].cache = (this_instr[1].cache << 1) | flag;
2470 #endif
2471 JUMPBY(oparg * flag);
2472 }
2473
2474 op(_IS_NONE, (value -- b)) {
2475 if (Py_IsNone(value)) {
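// value is None, which is immortal, so there is no reference to release here.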
2476 b = Py_True;
2477 }
2478 else {
2479 b = Py_False;
2480 DECREF_INPUTS();
2481 }
2482 }
2483
2484 macro(POP_JUMP_IF_TRUE) = unused/1 + _POP_JUMP_IF_TRUE;
2485
2486 macro(POP_JUMP_IF_FALSE) = unused/1 + _POP_JUMP_IF_FALSE;
2487
2488 macro(POP_JUMP_IF_NONE) = unused/1 + _IS_NONE + _POP_JUMP_IF_TRUE;
2489
2490 macro(POP_JUMP_IF_NOT_NONE) = unused/1 + _IS_NONE + _POP_JUMP_IF_FALSE;
2491
2492 tier1 inst(JUMP_BACKWARD_NO_INTERRUPT, (--)) {
2493 /* This bytecode is used in the `yield from` or `await` loop.
2494 * If there is an interrupt, we want it handled in the innermost
2495 * generator or coroutine, so we deliberately do not check it here.
2496 * (see bpo-30039).
2497 */
2498 JUMPBY(-oparg);
2499 }
2500
2501 inst(GET_LEN, (obj -- obj, len_o)) {
2502 // PUSH(len(TOS))
2503 Py_ssize_t len_i = PyObject_Length(obj);
2504 ERROR_IF(len_i < 0, error);
2505 len_o = PyLong_FromSsize_t(len_i);
2506 ERROR_IF(len_o == NULL, error);
2507 }
2508
2509 inst(MATCH_CLASS, (subject, type, names -- attrs)) {
2510 // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or
2511 // None on failure.
2512 assert(PyTuple_CheckExact(names));
2513 attrs = _PyEval_MatchClass(tstate, subject, type, oparg, names);
2514 DECREF_INPUTS();
2515 if (attrs) {
2516 assert(PyTuple_CheckExact(attrs)); // Success!
2517 }
2518 else {
2519 ERROR_IF(_PyErr_Occurred(tstate), error); // Error!
2520 attrs = Py_None; // Failure!
2521 }
2522 }
2523
2524 inst(MATCH_MAPPING, (subject -- subject, res)) {
2525 int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING;
2526 res = match ? Py_True : Py_False;
2527 }
2528
2529 inst(MATCH_SEQUENCE, (subject -- subject, res)) {
2530 int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE;
2531 res = match ? Py_True : Py_False;
2532 }
2533
2534 inst(MATCH_KEYS, (subject, keys -- subject, keys, values_or_none)) {
2535 // On successful match, PUSH(values). Otherwise, PUSH(None).
2536 values_or_none = _PyEval_MatchKeys(tstate, subject, keys);
2537 ERROR_IF(values_or_none == NULL, error);
2538 }
2539
2540 inst(GET_ITER, (iterable -- iter)) {
2541 /* before: [obj]; after [getiter(obj)] */
2542 iter = PyObject_GetIter(iterable);
2543 DECREF_INPUTS();
2544 ERROR_IF(iter == NULL, error);
2545 }
2546
2547 inst(GET_YIELD_FROM_ITER, (iterable -- iter)) {
2548 /* before: [obj]; after [getiter(obj)] */
2549 if (PyCoro_CheckExact(iterable)) {
2550 /* `iterable` is a coroutine */
2551 if (!(_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_ITERABLE_COROUTINE))) {
2552 /* and it is used in a 'yield from' expression of a
2553 regular generator. */
2554 _PyErr_SetString(tstate, PyExc_TypeError,
2555 "cannot 'yield from' a coroutine object "
2556 "in a non-coroutine generator");
2557 ERROR_NO_POP();
2558 }
2559 iter = iterable;
2560 }
2561 else if (PyGen_CheckExact(iterable)) {
2562 iter = iterable;
2563 }
2564 else {
2565 /* `iterable` is not a generator. */
2566 iter = PyObject_GetIter(iterable);
2567 if (iter == NULL) {
2568 ERROR_NO_POP();
2569 }
2570 DECREF_INPUTS();
2571 }
2572 }
2573
2574 // Most members of this family are "secretly" super-instructions.
2575 // When the loop is exhausted, they jump; the jump target is always an
2576 // END_FOR followed by a POP_TOP, which together pop two values off the stack.
2577 // This is optimized by skipping those instructions and combining their
2578 // effect (popping 'iter' instead of pushing 'next').
2579
2580 family(FOR_ITER, INLINE_CACHE_ENTRIES_FOR_ITER) = {
2581 FOR_ITER_LIST,
2582 FOR_ITER_TUPLE,
2583 FOR_ITER_RANGE,
2584 FOR_ITER_GEN,
2585 };
2586
2587 specializing op(_SPECIALIZE_FOR_ITER, (counter/1, iter -- iter)) {
2588 #if ENABLE_SPECIALIZATION
2589 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
2590 next_instr = this_instr;
2591 _Py_Specialize_ForIter(iter, next_instr, oparg);
2592 DISPATCH_SAME_OPARG();
2593 }
2594 STAT_INC(FOR_ITER, deferred);
2595 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
2596 #endif /* ENABLE_SPECIALIZATION */
2597 }
2598
2599 replaced op(_FOR_ITER, (iter -- iter, next)) {
2600 /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
2601 next = (*Py_TYPE(iter)->tp_iternext)(iter);
2602 if (next == NULL) {
2603 if (_PyErr_Occurred(tstate)) {
2604 if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
2605 ERROR_NO_POP();
2606 }
2607 _PyEval_MonitorRaise(tstate, frame, this_instr);
2608 _PyErr_Clear(tstate);
2609 }
2610 /* iterator ended normally */
2611 assert(next_instr[oparg].op.code == END_FOR ||
2612 next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
2613 Py_DECREF(iter);
2614 STACK_SHRINK(1);
2615 /* Jump forward oparg, then skip the following END_FOR and POP_TOP instructions */
2616 JUMPBY(oparg + 2);
2617 DISPATCH();
2618 }
2619 // Common case: no jump, leave it to the code generator
2620 }
2621
2622 op(_FOR_ITER_TIER_TWO, (iter -- iter, next)) {
2623 /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
2624 next = (*Py_TYPE(iter)->tp_iternext)(iter);
2625 if (next == NULL) {
2626 if (_PyErr_Occurred(tstate)) {
2627 if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
2628 ERROR_NO_POP();
2629 }
2630 _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr);
2631 _PyErr_Clear(tstate);
2632 }
2633 /* iterator ended normally */
2634 /* The translator sets the deopt target just past the matching END_FOR */
2635 DEOPT_IF(true);
2636 }
2637 // Common case: no jump, leave it to the code generator
2638 }
2639
2640 macro(FOR_ITER) = _SPECIALIZE_FOR_ITER + _FOR_ITER;
2641
2642 inst(INSTRUMENTED_FOR_ITER, (unused/1 -- )) {
2643 _Py_CODEUNIT *target;
2644 PyObject *iter = TOP();
2645 PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter);
2646 if (next != NULL) {
2647 PUSH(next);
2648 target = next_instr;
2649 }
2650 else {
2651 if (_PyErr_Occurred(tstate)) {
2652 if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
2653 ERROR_NO_POP();
2654 }
2655 _PyEval_MonitorRaise(tstate, frame, this_instr);
2656 _PyErr_Clear(tstate);
2657 }
2658 /* iterator ended normally */
2659 assert(next_instr[oparg].op.code == END_FOR ||
2660 next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
2661 STACK_SHRINK(1);
2662 Py_DECREF(iter);
2663 /* Skip END_FOR and POP_TOP */
2664 target = next_instr + oparg + 2;
2665 }
2666 INSTRUMENTED_JUMP(this_instr, target, PY_MONITORING_EVENT_BRANCH);
2667 }
2668
2669 op(_ITER_CHECK_LIST, (iter -- iter)) {
2670 EXIT_IF(Py_TYPE(iter) != &PyListIter_Type);
2671 }
2672
2673 replaced op(_ITER_JUMP_LIST, (iter -- iter)) {
2674 _PyListIterObject *it = (_PyListIterObject *)iter;
2675 assert(Py_TYPE(iter) == &PyListIter_Type);
2676 STAT_INC(FOR_ITER, hit);
2677 PyListObject *seq = it->it_seq;
2678 if (seq == NULL || (size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) {
2679 it->it_index = -1;
2680 #ifndef Py_GIL_DISABLED
2681 if (seq != NULL) {
2682 it->it_seq = NULL;
2683 Py_DECREF(seq);
2684 }
2685 #endif
2686 Py_DECREF(iter);
2687 STACK_SHRINK(1);
2688 /* Jump forward oparg, then skip following END_FOR and POP_TOP instructions */
2689 JUMPBY(oparg + 2);
2690 DISPATCH();
2691 }
2692 }
2693
2694 // Only used by Tier 2
2695 op(_GUARD_NOT_EXHAUSTED_LIST, (iter -- iter)) {
2696 _PyListIterObject *it = (_PyListIterObject *)iter;
2697 assert(Py_TYPE(iter) == &PyListIter_Type);
2698 PyListObject *seq = it->it_seq;
2699 EXIT_IF(seq == NULL);
2700 if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) {
2701 it->it_index = -1;
2702 EXIT_IF(1);
2703 }
2704 }
2705
2706 op(_ITER_NEXT_LIST, (iter -- iter, next)) {
2707 _PyListIterObject *it = (_PyListIterObject *)iter;
2708 assert(Py_TYPE(iter) == &PyListIter_Type);
2709 PyListObject *seq = it->it_seq;
2710 assert(seq);
2711 assert(it->it_index < PyList_GET_SIZE(seq));
2712 next = Py_NewRef(PyList_GET_ITEM(seq, it->it_index++));
2713 }
2714
2715 macro(FOR_ITER_LIST) =
2716 unused/1 + // Skip over the counter
2717 _ITER_CHECK_LIST +
2718 _ITER_JUMP_LIST +
2719 _ITER_NEXT_LIST;
2720
2721 op(_ITER_CHECK_TUPLE, (iter -- iter)) {
2722 EXIT_IF(Py_TYPE(iter) != &PyTupleIter_Type);
2723 }
2724
2725 replaced op(_ITER_JUMP_TUPLE, (iter -- iter)) {
2726 _PyTupleIterObject *it = (_PyTupleIterObject *)iter;
2727 assert(Py_TYPE(iter) == &PyTupleIter_Type);
2728 STAT_INC(FOR_ITER, hit);
2729 PyTupleObject *seq = it->it_seq;
2730 if (seq == NULL || it->it_index >= PyTuple_GET_SIZE(seq)) {
2731 if (seq != NULL) {
2732 it->it_seq = NULL;
2733 Py_DECREF(seq);
2734 }
2735 Py_DECREF(iter);
2736 STACK_SHRINK(1);
2737 /* Jump forward oparg, then skip following END_FOR and POP_TOP instructions */
2738 JUMPBY(oparg + 2);
2739 DISPATCH();
2740 }
2741 }
2742
2743 // Only used by Tier 2
2744 op(_GUARD_NOT_EXHAUSTED_TUPLE, (iter -- iter)) {
2745 _PyTupleIterObject *it = (_PyTupleIterObject *)iter;
2746 assert(Py_TYPE(iter) == &PyTupleIter_Type);
2747 PyTupleObject *seq = it->it_seq;
2748 EXIT_IF(seq == NULL);
2749 EXIT_IF(it->it_index >= PyTuple_GET_SIZE(seq));
2750 }
2751
2752 op(_ITER_NEXT_TUPLE, (iter -- iter, next)) {
2753 _PyTupleIterObject *it = (_PyTupleIterObject *)iter;
2754 assert(Py_TYPE(iter) == &PyTupleIter_Type);
2755 PyTupleObject *seq = it->it_seq;
2756 assert(seq);
2757 assert(it->it_index < PyTuple_GET_SIZE(seq));
2758 next = Py_NewRef(PyTuple_GET_ITEM(seq, it->it_index++));
2759 }
2760
2761 macro(FOR_ITER_TUPLE) =
2762 unused/1 + // Skip over the counter
2763 _ITER_CHECK_TUPLE +
2764 _ITER_JUMP_TUPLE +
2765 _ITER_NEXT_TUPLE;
2766
2767 op(_ITER_CHECK_RANGE, (iter -- iter)) {
2768 _PyRangeIterObject *r = (_PyRangeIterObject *)iter;
2769 EXIT_IF(Py_TYPE(r) != &PyRangeIter_Type);
2770 }
2771
2772 replaced op(_ITER_JUMP_RANGE, (iter -- iter)) {
2773 _PyRangeIterObject *r = (_PyRangeIterObject *)iter;
2774 assert(Py_TYPE(r) == &PyRangeIter_Type);
2775 STAT_INC(FOR_ITER, hit);
2776 if (r->len <= 0) {
2777 STACK_SHRINK(1);
2778 Py_DECREF(r);
2779 // Jump over END_FOR and POP_TOP instructions.
2780 JUMPBY(oparg + 2);
2781 DISPATCH();
2782 }
2783 }
2784
2785 // Only used by Tier 2
2786 op(_GUARD_NOT_EXHAUSTED_RANGE, (iter -- iter)) {
2787 _PyRangeIterObject *r = (_PyRangeIterObject *)iter;
2788 assert(Py_TYPE(r) == &PyRangeIter_Type);
2789 EXIT_IF(r->len <= 0);
2790 }
2791
2792 op(_ITER_NEXT_RANGE, (iter -- iter, next)) {
2793 _PyRangeIterObject *r = (_PyRangeIterObject *)iter;
2794 assert(Py_TYPE(r) == &PyRangeIter_Type);
2795 assert(r->len > 0);
2796 long value = r->start;
2797 r->start = value + r->step;
2798 r->len--;
2799 next = PyLong_FromLong(value);
2800 ERROR_IF(next == NULL, error);
2801 }
2802
2803 macro(FOR_ITER_RANGE) =
2804 unused/1 + // Skip over the counter
2805 _ITER_CHECK_RANGE +
2806 _ITER_JUMP_RANGE +
2807 _ITER_NEXT_RANGE;
2808
2809 op(_FOR_ITER_GEN_FRAME, (iter -- iter, gen_frame: _PyInterpreterFrame*)) {
2810 PyGenObject *gen = (PyGenObject *)iter;
2811 DEOPT_IF(Py_TYPE(gen) != &PyGen_Type);
2812 DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING);
2813 STAT_INC(FOR_ITER, hit);
2814 gen_frame = (_PyInterpreterFrame *)gen->gi_iframe;
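// Push Py_None as the value to send: resuming the generator here behaves like
// next(gen), i.e. gen.send(None).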
2815 _PyFrame_StackPush(gen_frame, Py_None);
2816 gen->gi_frame_state = FRAME_EXECUTING;
2817 gen->gi_exc_state.previous_item = tstate->exc_info;
2818 tstate->exc_info = &gen->gi_exc_state;
2819 // oparg is the return offset from the next instruction.
2820 frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_FOR_ITER + oparg);
2821 }
2822
2823 macro(FOR_ITER_GEN) =
2824 unused/1 +
2825 _CHECK_PEP_523 +
2826 _FOR_ITER_GEN_FRAME +
2827 _PUSH_FRAME;
2828
2829 inst(BEFORE_ASYNC_WITH, (mgr -- exit, res)) {
2830 PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__));
2831 if (enter == NULL) {
2832 if (!_PyErr_Occurred(tstate)) {
2833 _PyErr_Format(tstate, PyExc_TypeError,
2834 "'%.200s' object does not support the "
2835 "asynchronous context manager protocol",
2836 Py_TYPE(mgr)->tp_name);
2837 }
2838 ERROR_NO_POP();
2839 }
2840 exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__aexit__));
2841 if (exit == NULL) {
2842 if (!_PyErr_Occurred(tstate)) {
2843 _PyErr_Format(tstate, PyExc_TypeError,
2844 "'%.200s' object does not support the "
2845 "asynchronous context manager protocol "
2846 "(missed __aexit__ method)",
2847 Py_TYPE(mgr)->tp_name);
2848 }
2849 Py_DECREF(enter);
2850 ERROR_NO_POP();
2851 }
2852 DECREF_INPUTS();
2853 res = PyObject_CallNoArgs(enter);
2854 Py_DECREF(enter);
2855 if (res == NULL) {
2856 Py_DECREF(exit);
2857 ERROR_IF(true, error);
2858 }
2859 }
2860
2861 inst(BEFORE_WITH, (mgr -- exit, res)) {
2862 /* pop the context manager, push its __exit__ and the
2863 * value returned from calling its __enter__
2864 */
2865 PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__enter__));
2866 if (enter == NULL) {
2867 if (!_PyErr_Occurred(tstate)) {
2868 _PyErr_Format(tstate, PyExc_TypeError,
2869 "'%.200s' object does not support the "
2870 "context manager protocol",
2871 Py_TYPE(mgr)->tp_name);
2872 }
2873 ERROR_NO_POP();
2874 }
2875 exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__exit__));
2876 if (exit == NULL) {
2877 if (!_PyErr_Occurred(tstate)) {
2878 _PyErr_Format(tstate, PyExc_TypeError,
2879 "'%.200s' object does not support the "
2880 "context manager protocol "
2881 "(missed __exit__ method)",
2882 Py_TYPE(mgr)->tp_name);
2883 }
2884 Py_DECREF(enter);
2885 ERROR_NO_POP();
2886 }
2887 DECREF_INPUTS();
2888 res = PyObject_CallNoArgs(enter);
2889 Py_DECREF(enter);
2890 if (res == NULL) {
2891 Py_DECREF(exit);
2892 ERROR_IF(true, error);
2893 }
2894 }
2895
2896 inst(WITH_EXCEPT_START, (exit_func, lasti, unused, val -- exit_func, lasti, unused, val, res)) {
2897 /* At the top of the stack are 4 values:
2898 - val: TOP = exc_info()
2899 - unused: SECOND = previous exception
2900 - lasti: THIRD = lasti of exception in exc_info()
2901 - exit_func: FOURTH = the context.__exit__ bound method
2902 We call FOURTH(type(TOP), TOP, GetTraceback(TOP)).
2903 Then we push the __exit__ return value.
2904 */
2905 PyObject *exc, *tb;
2906
2907 assert(val && PyExceptionInstance_Check(val));
2908 exc = PyExceptionInstance_Class(val);
2909 tb = PyException_GetTraceback(val);
2910 if (tb == NULL) {
2911 tb = Py_None;
2912 }
2913 else {
2914 Py_DECREF(tb);
2915 }
2916 assert(PyLong_Check(lasti));
2917 (void)lasti; // Shut up compiler warning if asserts are off
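// The leading NULL slot lets the callee use PY_VECTORCALL_ARGUMENTS_OFFSET
// (prepend a bound self without copying the argument array).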
2918 PyObject *stack[4] = {NULL, exc, val, tb};
2919 res = PyObject_Vectorcall(exit_func, stack + 1,
2920 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL);
2921 ERROR_IF(res == NULL, error);
2922 }
2923
2924 pseudo(SETUP_FINALLY, (HAS_ARG)) = {
2925 NOP,
2926 };
2927
2928 pseudo(SETUP_CLEANUP, (HAS_ARG)) = {
2929 NOP,
2930 };
2931
2932 pseudo(SETUP_WITH, (HAS_ARG)) = {
2933 NOP,
2934 };
2935
2936 pseudo(POP_BLOCK) = {
2937 NOP,
2938 };
2939
2940 inst(PUSH_EXC_INFO, (new_exc -- prev_exc, new_exc)) {
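// Save the currently handled exception (or None) beneath the new one so it can
// be restored later (e.g. by POP_EXCEPT).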
2941 _PyErr_StackItem *exc_info = tstate->exc_info;
2942 if (exc_info->exc_value != NULL) {
2943 prev_exc = exc_info->exc_value;
2944 }
2945 else {
2946 prev_exc = Py_None;
2947 }
2948 assert(PyExceptionInstance_Check(new_exc));
2949 exc_info->exc_value = Py_NewRef(new_exc);
2950 }
2951
2952 op(_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, (owner -- owner)) {
2953 assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
2954 DEOPT_IF(!_PyObject_InlineValues(owner)->valid);
2955 }
2956
2957 op(_GUARD_KEYS_VERSION, (keys_version/2, owner -- owner)) {
2958 PyTypeObject *owner_cls = Py_TYPE(owner);
2959 PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls;
2960 DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version);
2961 }
2962
2963 split op(_LOAD_ATTR_METHOD_WITH_VALUES, (descr/4, owner -- attr, self if (1))) {
2964 assert(oparg & 1);
2965 /* Cached method object */
2966 STAT_INC(LOAD_ATTR, hit);
2967 assert(descr != NULL);
2968 attr = Py_NewRef(descr);
2969 assert(_PyType_HasFeature(Py_TYPE(attr), Py_TPFLAGS_METHOD_DESCRIPTOR));
2970 self = owner;
2971 }
2972
2973 macro(LOAD_ATTR_METHOD_WITH_VALUES) =
2974 unused/1 +
2975 _GUARD_TYPE_VERSION +
2976 _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT +
2977 _GUARD_KEYS_VERSION +
2978 _LOAD_ATTR_METHOD_WITH_VALUES;
2979
2980 op(_LOAD_ATTR_METHOD_NO_DICT, (descr/4, owner -- attr, self if (1))) {
2981 assert(oparg & 1);
2982 assert(Py_TYPE(owner)->tp_dictoffset == 0);
2983 STAT_INC(LOAD_ATTR, hit);
2984 assert(descr != NULL);
2985 assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR));
2986 attr = Py_NewRef(descr);
2987 self = owner;
2988 }
2989
2990 macro(LOAD_ATTR_METHOD_NO_DICT) =
2991 unused/1 +
2992 _GUARD_TYPE_VERSION +
2993 unused/2 +
2994 _LOAD_ATTR_METHOD_NO_DICT;
2995
2996 op(_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES, (descr/4, owner -- attr, unused if (0))) {
2997 assert((oparg & 1) == 0);
2998 STAT_INC(LOAD_ATTR, hit);
2999 assert(descr != NULL);
3000 DECREF_INPUTS();
3001 attr = Py_NewRef(descr);
3002 }
3003
3004 macro(LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES) =
3005 unused/1 +
3006 _GUARD_TYPE_VERSION +
3007 _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT +
3008 _GUARD_KEYS_VERSION +
3009 _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES;
3010
3011 op(_LOAD_ATTR_NONDESCRIPTOR_NO_DICT, (descr/4, owner -- attr, unused if (0))) {
3012 assert((oparg & 1) == 0);
3013 assert(Py_TYPE(owner)->tp_dictoffset == 0);
3014 STAT_INC(LOAD_ATTR, hit);
3015 assert(descr != NULL);
3016 DECREF_INPUTS();
3017 attr = Py_NewRef(descr);
3018 }
3019
3020 macro(LOAD_ATTR_NONDESCRIPTOR_NO_DICT) =
3021 unused/1 +
3022 _GUARD_TYPE_VERSION +
3023 unused/2 +
3024 _LOAD_ATTR_NONDESCRIPTOR_NO_DICT;
3025
3026 op(_CHECK_ATTR_METHOD_LAZY_DICT, (dictoffset/1, owner -- owner)) {
3027 char *ptr = ((char *)owner) + MANAGED_DICT_OFFSET + dictoffset;
3028 PyObject *dict = *(PyObject **)ptr;
3029 /* This object has a __dict__, just not yet created */
3030 DEOPT_IF(dict != NULL);
3031 }
3032
3033 op(_LOAD_ATTR_METHOD_LAZY_DICT, (descr/4, owner -- attr, self if (1))) {
3034 assert(oparg & 1);
3035 STAT_INC(LOAD_ATTR, hit);
3036 assert(descr != NULL);
3037 assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR));
3038 attr = Py_NewRef(descr);
3039 self = owner;
3040 }
3041
3042 macro(LOAD_ATTR_METHOD_LAZY_DICT) =
3043 unused/1 +
3044 _GUARD_TYPE_VERSION +
3045 _CHECK_ATTR_METHOD_LAZY_DICT +
3046 unused/1 +
3047 _LOAD_ATTR_METHOD_LAZY_DICT;
3048
3049 inst(INSTRUMENTED_CALL, (unused/3 -- )) {
3050 int is_meth = PEEK(oparg + 1) != NULL;
3051 int total_args = oparg + is_meth;
3052 PyObject *function = PEEK(oparg + 2);
3053 PyObject *arg = total_args == 0 ?
3054 &_PyInstrumentation_MISSING : PEEK(total_args);
3055 int err = _Py_call_instrumentation_2args(
3056 tstate, PY_MONITORING_EVENT_CALL,
3057 frame, this_instr, function, arg);
3058 ERROR_IF(err, error);
3059 PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter);
3060 GO_TO_INSTRUCTION(CALL);
3061 }
3062
3063 // Cache layout: counter/1, func_version/2
3064 // CALL_INTRINSIC_1/2, CALL_KW, and CALL_FUNCTION_EX aren't members!
3065 family(CALL, INLINE_CACHE_ENTRIES_CALL) = {
3066 CALL_BOUND_METHOD_EXACT_ARGS,
3067 CALL_PY_EXACT_ARGS,
3068 CALL_TYPE_1,
3069 CALL_STR_1,
3070 CALL_TUPLE_1,
3071 CALL_BUILTIN_CLASS,
3072 CALL_BUILTIN_O,
3073 CALL_BUILTIN_FAST,
3074 CALL_BUILTIN_FAST_WITH_KEYWORDS,
3075 CALL_LEN,
3076 CALL_ISINSTANCE,
3077 CALL_LIST_APPEND,
3078 CALL_METHOD_DESCRIPTOR_O,
3079 CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS,
3080 CALL_METHOD_DESCRIPTOR_NOARGS,
3081 CALL_METHOD_DESCRIPTOR_FAST,
3082 CALL_ALLOC_AND_ENTER_INIT,
3083 CALL_PY_GENERAL,
3084 CALL_BOUND_METHOD_GENERAL,
3085 CALL_NON_PY_GENERAL,
3086 };
3087
3088 specializing op(_SPECIALIZE_CALL, (counter/1, callable, self_or_null, args[oparg] -- callable, self_or_null, args[oparg])) {
3089 #if ENABLE_SPECIALIZATION
3090 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
3091 next_instr = this_instr;
3092 _Py_Specialize_Call(callable, next_instr, oparg + (self_or_null != NULL));
3093 DISPATCH_SAME_OPARG();
3094 }
3095 STAT_INC(CALL, deferred);
3096 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
3097 #endif /* ENABLE_SPECIALIZATION */
3098 }
3099
3100 // When calling Python, inline the call using DISPATCH_INLINED().
3101 op(_CALL, (callable, self_or_null, args[oparg] -- res)) {
3102 // oparg counts all of the args, but *not* self:
3103 int total_args = oparg;
3104 if (self_or_null != NULL) {
3105 args--;
3106 total_args++;
3107 }
3108 else if (Py_TYPE(callable) == &PyMethod_Type) {
3109 args--;
3110 total_args++;
3111 PyObject *self = ((PyMethodObject *)callable)->im_self;
3112 args[0] = Py_NewRef(self);
3113 PyObject *method = ((PyMethodObject *)callable)->im_func;
3114 args[-1] = Py_NewRef(method);
3115 Py_DECREF(callable);
3116 callable = method;
3117 }
3118 // Check if the call can be inlined or not
3119 if (Py_TYPE(callable) == &PyFunction_Type &&
3120 tstate->interp->eval_frame == NULL &&
3121 ((PyFunctionObject *)callable)->vectorcall == _PyFunction_Vectorcall)
3122 {
3123 int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable))->co_flags;
3124 PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable));
3125 _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit(
3126 tstate, (PyFunctionObject *)callable, locals,
3127 args, total_args, NULL
3128 );
3129 // Manipulate stack directly since we leave using DISPATCH_INLINED().
3130 STACK_SHRINK(oparg + 2);
3131 // The frame has stolen all the arguments from the stack,
3132 // so there is no need to clean them up.
3133 if (new_frame == NULL) {
3134 ERROR_NO_POP();
3135 }
3136 frame->return_offset = (uint16_t)(next_instr - this_instr);
3137 DISPATCH_INLINED(new_frame);
3138 }
3139 /* Callable is not a normal Python function */
3140 res = PyObject_Vectorcall(
3141 callable, args,
3142 total_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
3143 NULL);
3144 if (opcode == INSTRUMENTED_CALL) {
3145 PyObject *arg = total_args == 0 ?
3146 &_PyInstrumentation_MISSING : args[0];
3147 if (res == NULL) {
3148 _Py_call_instrumentation_exc2(
3149 tstate, PY_MONITORING_EVENT_C_RAISE,
3150 frame, this_instr, callable, arg);
3151 }
3152 else {
3153 int err = _Py_call_instrumentation_2args(
3154 tstate, PY_MONITORING_EVENT_C_RETURN,
3155 frame, this_instr, callable, arg);
3156 if (err < 0) {
3157 Py_CLEAR(res);
3158 }
3159 }
3160 }
3161 assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
3162 Py_DECREF(callable);
3163 for (int i = 0; i < total_args; i++) {
3164 Py_DECREF(args[i]);
3165 }
3166 ERROR_IF(res == NULL, error);
3167 }
3168
3169 op(_CHECK_PERIODIC, (--)) {
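// Give the eval loop a chance to service the eval breaker (signals, pending
// calls, etc.) after a call that was not inlined.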
3170 CHECK_EVAL_BREAKER();
3171 }
3172
3173 macro(CALL) = _SPECIALIZE_CALL + unused/2 + _CALL + _CHECK_PERIODIC;
3174
3175 op(_PY_FRAME_GENERAL, (callable, self_or_null, args[oparg] -- new_frame: _PyInterpreterFrame*)) {
3176 // oparg counts all of the args, but *not* self:
3177 int total_args = oparg;
3178 if (self_or_null != NULL) {
3179 args--;
3180 total_args++;
3181 }
3182 assert(Py_TYPE(callable) == &PyFunction_Type);
3183 int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable))->co_flags;
3184 PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable));
3185 new_frame = _PyEvalFramePushAndInit(
3186 tstate, (PyFunctionObject *)callable, locals,
3187 args, total_args, NULL
3188 );
3189 // The frame has stolen all the arguments from the stack,
3190 // so there is no need to clean them up.
3191 SYNC_SP();
3192 if (new_frame == NULL) {
3193 ERROR_NO_POP();
3194 }
3195 }
3196
3197 op(_CHECK_FUNCTION_VERSION, (func_version/2, callable, unused, unused[oparg] -- callable, unused, unused[oparg])) {
3198 EXIT_IF(!PyFunction_Check(callable));
3199 PyFunctionObject *func = (PyFunctionObject *)callable;
3200 EXIT_IF(func->func_version != func_version);
3201 }
3202
3203 macro(CALL_PY_GENERAL) =
3204 unused/1 + // Skip over the counter
3205 _CHECK_PEP_523 +
3206 _CHECK_FUNCTION_VERSION +
3207 _PY_FRAME_GENERAL +
3208 _SAVE_RETURN_OFFSET +
3209 _PUSH_FRAME;
3210
3211 op(_CHECK_METHOD_VERSION, (func_version/2, callable, null, unused[oparg] -- callable, null, unused[oparg])) {
3212 EXIT_IF(Py_TYPE(callable) != &PyMethod_Type);
3213 PyObject *func = ((PyMethodObject *)callable)->im_func;
3214 EXIT_IF(!PyFunction_Check(func));
3215 EXIT_IF(((PyFunctionObject *)func)->func_version != func_version);
3216 EXIT_IF(null != NULL);
3217 }
3218
3219 op(_EXPAND_METHOD, (callable, null, unused[oparg] -- method, self, unused[oparg])) {
3220 assert(null == NULL);
3221 assert(Py_TYPE(callable) == &PyMethod_Type);
3222 self = ((PyMethodObject *)callable)->im_self;
3223 Py_INCREF(self);
3224 stack_pointer[-1 - oparg] = self; // Patch stack as it is used by _PY_FRAME_GENERAL
3225 method = ((PyMethodObject *)callable)->im_func;
3226 assert(PyFunction_Check(method));
3227 Py_INCREF(method);
3228 Py_DECREF(callable);
3229 }
3230
3231 macro(CALL_BOUND_METHOD_GENERAL) =
3232 unused/1 + // Skip over the counter
3233 _CHECK_PEP_523 +
3234 _CHECK_METHOD_VERSION +
3235 _EXPAND_METHOD +
3236 _PY_FRAME_GENERAL +
3237 _SAVE_RETURN_OFFSET +
3238 _PUSH_FRAME;
3239
3240 op(_CHECK_IS_NOT_PY_CALLABLE, (callable, unused, unused[oparg] -- callable, unused, unused[oparg])) {
3241 EXIT_IF(PyFunction_Check(callable));
3242 EXIT_IF(Py_TYPE(callable) == &PyMethod_Type);
3243 }
3244
3245 op(_CALL_NON_PY_GENERAL, (callable, self_or_null, args[oparg] -- res)) {
3246 #if TIER_ONE
3247 assert(opcode != INSTRUMENTED_CALL);
3248 #endif
3249 int total_args = oparg;
3250 if (self_or_null != NULL) {
3251 args--;
3252 total_args++;
3253 }
3254 /* Callable is not a normal Python function */
3255 res = PyObject_Vectorcall(
3256 callable, args,
3257 total_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
3258 NULL);
3259 assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
3260 Py_DECREF(callable);
3261 for (int i = 0; i < total_args; i++) {
3262 Py_DECREF(args[i]);
3263 }
3264 ERROR_IF(res == NULL, error);
3265 }
3266
3267 macro(CALL_NON_PY_GENERAL) =
3268 unused/1 + // Skip over the counter
3269 unused/2 +
3270 _CHECK_IS_NOT_PY_CALLABLE +
3271 _CALL_NON_PY_GENERAL +
3272 _CHECK_PERIODIC;
3273
3274 op(_CHECK_CALL_BOUND_METHOD_EXACT_ARGS, (callable, null, unused[oparg] -- callable, null, unused[oparg])) {
3275 EXIT_IF(null != NULL);
3276 EXIT_IF(Py_TYPE(callable) != &PyMethod_Type);
3277 }
3278
3279 op(_INIT_CALL_BOUND_METHOD_EXACT_ARGS, (callable, unused, unused[oparg] -- func, self, unused[oparg])) {
3280 STAT_INC(CALL, hit);
3281 self = Py_NewRef(((PyMethodObject *)callable)->im_self);
3282 stack_pointer[-1 - oparg] = self; // Patch stack as it is used by _INIT_CALL_PY_EXACT_ARGS
3283 func = Py_NewRef(((PyMethodObject *)callable)->im_func);
3284 stack_pointer[-2 - oparg] = func; // This is used by CALL, upon deoptimization
3285 Py_DECREF(callable);
3286 }
3287
3288 op(_CHECK_PEP_523, (--)) {
3289 DEOPT_IF(tstate->interp->eval_frame);
3290 }
3291
3292 op(_CHECK_FUNCTION_EXACT_ARGS, (func_version/2, callable, self_or_null, unused[oparg] -- callable, self_or_null, unused[oparg])) {
3293 EXIT_IF(!PyFunction_Check(callable));
3294 PyFunctionObject *func = (PyFunctionObject *)callable;
3295 EXIT_IF(func->func_version != func_version);
3296 PyCodeObject *code = (PyCodeObject *)func->func_code;
3297 EXIT_IF(code->co_argcount != oparg + (self_or_null != NULL));
3298 }
3299
3300 op(_CHECK_STACK_SPACE, (callable, unused, unused[oparg] -- callable, unused, unused[oparg])) {
3301 PyFunctionObject *func = (PyFunctionObject *)callable;
3302 PyCodeObject *code = (PyCodeObject *)func->func_code;
3303 DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize));
3304 DEOPT_IF(tstate->py_recursion_remaining <= 1);
3305 }
3306
3307 replicate(5) pure op(_INIT_CALL_PY_EXACT_ARGS, (callable, self_or_null, args[oparg] -- new_frame: _PyInterpreterFrame*)) {
3308 int has_self = (self_or_null != NULL);
3309 STAT_INC(CALL, hit);
3310 PyFunctionObject *func = (PyFunctionObject *)callable;
3311 new_frame = _PyFrame_PushUnchecked(tstate, func, oparg + has_self);
3312 PyObject **first_non_self_local = new_frame->localsplus + has_self;
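// If there is no self, first_non_self_local aliases localsplus[0], so the NULL
// stored below is immediately overwritten by args[0]; storing unconditionally
// avoids a branch.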
3313 new_frame->localsplus[0] = self_or_null;
3314 for (int i = 0; i < oparg; i++) {
3315 first_non_self_local[i] = args[i];
3316 }
3317 }
3318
3319 op(_PUSH_FRAME, (new_frame: _PyInterpreterFrame* -- )) {
3320 // Write it out explicitly because it's subtly different.
3321 // Eventually this should be the only occurrence of this code.
3322 assert(tstate->interp->eval_frame == NULL);
3323 SYNC_SP();
3324 _PyFrame_SetStackPointer(frame, stack_pointer);
3325 new_frame->previous = frame;
3326 CALL_STAT_INC(inlined_py_calls);
3327 frame = tstate->current_frame = new_frame;
3328 tstate->py_recursion_remaining--;
3329 LOAD_SP();
3330 LOAD_IP(0);
3331 LLTRACE_RESUME_FRAME();
3332 }
3333
3334 macro(CALL_BOUND_METHOD_EXACT_ARGS) =
3335 unused/1 + // Skip over the counter
3336 _CHECK_PEP_523 +
3337 _CHECK_CALL_BOUND_METHOD_EXACT_ARGS +
3338 _INIT_CALL_BOUND_METHOD_EXACT_ARGS +
3339 _CHECK_FUNCTION_EXACT_ARGS +
3340 _CHECK_STACK_SPACE +
3341 _INIT_CALL_PY_EXACT_ARGS +
3342 _SAVE_RETURN_OFFSET +
3343 _PUSH_FRAME;
3344
3345 macro(CALL_PY_EXACT_ARGS) =
3346 unused/1 + // Skip over the counter
3347 _CHECK_PEP_523 +
3348 _CHECK_FUNCTION_EXACT_ARGS +
3349 _CHECK_STACK_SPACE +
3350 _INIT_CALL_PY_EXACT_ARGS +
3351 _SAVE_RETURN_OFFSET +
3352 _PUSH_FRAME;
3353
3354 inst(CALL_TYPE_1, (unused/1, unused/2, callable, null, arg -- res)) {
3355 assert(oparg == 1);
3356 DEOPT_IF(null != NULL);
3357 DEOPT_IF(callable != (PyObject *)&PyType_Type);
3358 STAT_INC(CALL, hit);
3359 res = Py_NewRef(Py_TYPE(arg));
3360 Py_DECREF(arg);
3361 }
3362
3363 op(_CALL_STR_1, (callable, null, arg -- res)) {
3364 assert(oparg == 1);
3365 DEOPT_IF(null != NULL);
3366 DEOPT_IF(callable != (PyObject *)&PyUnicode_Type);
3367 STAT_INC(CALL, hit);
3368 res = PyObject_Str(arg);
3369 Py_DECREF(arg);
3370 ERROR_IF(res == NULL, error);
3371 }
3372
3373 macro(CALL_STR_1) =
3374 unused/1 +
3375 unused/2 +
3376 _CALL_STR_1 +
3377 _CHECK_PERIODIC;
3378
3379 op(_CALL_TUPLE_1, (callable, null, arg -- res)) {
3380 assert(oparg == 1);
3381 DEOPT_IF(null != NULL);
3382 DEOPT_IF(callable != (PyObject *)&PyTuple_Type);
3383 STAT_INC(CALL, hit);
3384 res = PySequence_Tuple(arg);
3385 Py_DECREF(arg);
3386 ERROR_IF(res == NULL, error);
3387 }
3388
3389 macro(CALL_TUPLE_1) =
3390 unused/1 +
3391 unused/2 +
3392 _CALL_TUPLE_1 +
3393 _CHECK_PERIODIC;
3394
3395 inst(CALL_ALLOC_AND_ENTER_INIT, (unused/1, unused/2, callable, null, args[oparg] -- unused)) {
3396 /* This instruction does the following:
3397 * 1. Creates the object (by calling ``object.__new__``)
3398 * 2. Pushes a shim frame to the frame stack (to cleanup after ``__init__``)
3399 * 3. Pushes the frame for ``__init__`` to the frame stack
3400 * */
3401 _PyCallCache *cache = (_PyCallCache *)&this_instr[1];
3402 DEOPT_IF(null != NULL);
3403 DEOPT_IF(!PyType_Check(callable));
3404 PyTypeObject *tp = (PyTypeObject *)callable;
3405 DEOPT_IF(tp->tp_version_tag != read_u32(cache->func_version));
3406 assert(tp->tp_flags & Py_TPFLAGS_INLINE_VALUES);
3407 PyHeapTypeObject *cls = (PyHeapTypeObject *)callable;
3408 PyFunctionObject *init = (PyFunctionObject *)cls->_spec_cache.init;
3409 PyCodeObject *code = (PyCodeObject *)init->func_code;
3410 DEOPT_IF(code->co_argcount != oparg+1);
3411 DEOPT_IF((code->co_flags & (CO_VARKEYWORDS | CO_VARARGS | CO_OPTIMIZED)) != CO_OPTIMIZED);
3412 DEOPT_IF(code->co_kwonlyargcount);
3413 DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize + _Py_InitCleanup.co_framesize));
3414 STAT_INC(CALL, hit);
3415 PyObject *self = _PyType_NewManagedObject(tp);
3416 if (self == NULL) {
3417 ERROR_NO_POP();
3418 }
3419 Py_DECREF(tp);
3420 _PyInterpreterFrame *shim = _PyFrame_PushTrampolineUnchecked(
3421 tstate, (PyCodeObject *)&_Py_InitCleanup, 1);
3422 assert(_PyCode_CODE((PyCodeObject *)shim->f_executable)[0].op.code == EXIT_INIT_CHECK);
3423 /* Push self onto stack of shim */
3424 Py_INCREF(self);
3425 shim->localsplus[0] = self;
3426 Py_INCREF(init);
3427 _PyInterpreterFrame *init_frame = _PyFrame_PushUnchecked(tstate, init, oparg+1);
3428 /* Copy self followed by args to __init__ frame */
3429 init_frame->localsplus[0] = self;
3430 for (int i = 0; i < oparg; i++) {
3431 init_frame->localsplus[i+1] = args[i];
3432 }
3433 frame->return_offset = (uint16_t)(next_instr - this_instr);
3434 STACK_SHRINK(oparg+2);
3435 _PyFrame_SetStackPointer(frame, stack_pointer);
3436 /* Link frames */
3437 init_frame->previous = shim;
3438 shim->previous = frame;
3439 frame = tstate->current_frame = init_frame;
3440 CALL_STAT_INC(inlined_py_calls);
3441 /* Account for pushing the extra frame.
3442 * We don't check recursion depth here,
3443 * as it will be checked after start_frame */
3444 tstate->py_recursion_remaining--;
3445 goto start_frame;
3446 }
3447
3448 inst(EXIT_INIT_CHECK, (should_be_none -- )) {
3449 assert(STACK_LEVEL() == 2);
3450 if (should_be_none != Py_None) {
3451 PyErr_Format(PyExc_TypeError,
3452 "__init__() should return None, not '%.200s'",
3453 Py_TYPE(should_be_none)->tp_name);
3454 ERROR_NO_POP();
3455 }
3456 }
3457
3458 op(_CALL_BUILTIN_CLASS, (callable, self_or_null, args[oparg] -- res)) {
3459 int total_args = oparg;
3460 if (self_or_null != NULL) {
3461 args--;
3462 total_args++;
3463 }
3464 DEOPT_IF(!PyType_Check(callable));
3465 PyTypeObject *tp = (PyTypeObject *)callable;
3466 DEOPT_IF(tp->tp_vectorcall == NULL);
3467 STAT_INC(CALL, hit);
3468 res = tp->tp_vectorcall((PyObject *)tp, args, total_args, NULL);
3469 /* Free the arguments. */
3470 for (int i = 0; i < total_args; i++) {
3471 Py_DECREF(args[i]);
3472 }
3473 Py_DECREF(tp);
3474 ERROR_IF(res == NULL, error);
3475 }
3476
3477 macro(CALL_BUILTIN_CLASS) =
3478 unused/1 +
3479 unused/2 +
3480 _CALL_BUILTIN_CLASS +
3481 _CHECK_PERIODIC;
3482
3483 op(_CALL_BUILTIN_O, (callable, self_or_null, args[oparg] -- res)) {
3484 /* Builtin METH_O functions */
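            /* e.g. ``abs(x)`` */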
3485 int total_args = oparg;
3486 if (self_or_null != NULL) {
3487 args--;
3488 total_args++;
3489 }
3490 DEOPT_IF(total_args != 1);
3491 DEOPT_IF(!PyCFunction_CheckExact(callable));
3492 DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_O);
3493 // CPython promises to check all non-vectorcall function calls.
3494 DEOPT_IF(tstate->c_recursion_remaining <= 0);
3495 STAT_INC(CALL, hit);
3496 PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable);
3497 PyObject *arg = args[0];
3498 _Py_EnterRecursiveCallTstateUnchecked(tstate);
3499 res = _PyCFunction_TrampolineCall(cfunc, PyCFunction_GET_SELF(callable), arg);
3500 _Py_LeaveRecursiveCallTstate(tstate);
3501 assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
3502
3503 Py_DECREF(arg);
3504 Py_DECREF(callable);
3505 ERROR_IF(res == NULL, error);
3506 }
3507
3508 macro(CALL_BUILTIN_O) =
3509 unused/1 +
3510 unused/2 +
3511 _CALL_BUILTIN_O +
3512 _CHECK_PERIODIC;
3513
3514 op(_CALL_BUILTIN_FAST, (callable, self_or_null, args[oparg] -- res)) {
3515 /* Builtin METH_FASTCALL functions, without keywords */
3516 int total_args = oparg;
3517 if (self_or_null != NULL) {
3518 args--;
3519 total_args++;
3520 }
3521 DEOPT_IF(!PyCFunction_CheckExact(callable));
3522 DEOPT_IF(PyCFunction_GET_FLAGS(callable) != METH_FASTCALL);
3523 STAT_INC(CALL, hit);
3524 PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable);
3525 /* res = func(self, args, nargs) */
3526 res = ((PyCFunctionFast)(void(*)(void))cfunc)(
3527 PyCFunction_GET_SELF(callable),
3528 args,
3529 total_args);
3530 assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
3531
3532 /* Free the arguments. */
3533 for (int i = 0; i < total_args; i++) {
3534 Py_DECREF(args[i]);
3535 }
3536 Py_DECREF(callable);
3537 ERROR_IF(res == NULL, error);
3538 }
3539
3540 macro(CALL_BUILTIN_FAST) =
3541 unused/1 +
3542 unused/2 +
3543 _CALL_BUILTIN_FAST +
3544 _CHECK_PERIODIC;
3545
3546 op(_CALL_BUILTIN_FAST_WITH_KEYWORDS, (callable, self_or_null, args[oparg] -- res)) {
3547 /* Builtin METH_FASTCALL | METH_KEYWORDS functions */
3548 int total_args = oparg;
3549 if (self_or_null != NULL) {
3550 args--;
3551 total_args++;
3552 }
3553 DEOPT_IF(!PyCFunction_CheckExact(callable));
3554 DEOPT_IF(PyCFunction_GET_FLAGS(callable) != (METH_FASTCALL | METH_KEYWORDS));
3555 STAT_INC(CALL, hit);
3556 /* res = func(self, args, nargs, kwnames) */
3557 PyCFunctionFastWithKeywords cfunc =
3558 (PyCFunctionFastWithKeywords)(void(*)(void))
3559 PyCFunction_GET_FUNCTION(callable);
3560 res = cfunc(PyCFunction_GET_SELF(callable), args, total_args, NULL);
3561 assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
3562
3563 /* Free the arguments. */
3564 for (int i = 0; i < total_args; i++) {
3565 Py_DECREF(args[i]);
3566 }
3567 Py_DECREF(callable);
3568 ERROR_IF(res == NULL, error);
3569 }
3570
3571 macro(CALL_BUILTIN_FAST_WITH_KEYWORDS) =
3572 unused/1 +
3573 unused/2 +
3574 _CALL_BUILTIN_FAST_WITH_KEYWORDS +
3575 _CHECK_PERIODIC;
3576
3577 inst(CALL_LEN, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) {
3578 /* len(o) */
3579 int total_args = oparg;
3580 if (self_or_null != NULL) {
3581 args--;
3582 total_args++;
3583 }
3584 DEOPT_IF(total_args != 1);
3585 PyInterpreterState *interp = tstate->interp;
3586 DEOPT_IF(callable != interp->callable_cache.len);
3587 STAT_INC(CALL, hit);
3588 PyObject *arg = args[0];
3589 Py_ssize_t len_i = PyObject_Length(arg);
3590 if (len_i < 0) {
3591 ERROR_NO_POP();
3592 }
3593 res = PyLong_FromSsize_t(len_i);
3594 assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
3595 if (res == NULL) {
3596 GOTO_ERROR(error);
3597 }
3598 Py_DECREF(callable);
3599 Py_DECREF(arg);
3600 }
3601
3602 inst(CALL_ISINSTANCE, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) {
3603 /* isinstance(o, o2) */
3604 int total_args = oparg;
3605 if (self_or_null != NULL) {
3606 args--;
3607 total_args++;
3608 }
3609 DEOPT_IF(total_args != 2);
3610 PyInterpreterState *interp = tstate->interp;
3611 DEOPT_IF(callable != interp->callable_cache.isinstance);
3612 STAT_INC(CALL, hit);
3613 PyObject *cls = args[1];
3614 PyObject *inst = args[0];
3615 int retval = PyObject_IsInstance(inst, cls);
3616 if (retval < 0) {
3617 ERROR_NO_POP();
3618 }
3619 res = PyBool_FromLong(retval);
3620 assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
3621 if (res == NULL) {
3622 GOTO_ERROR(error);
3623 }
3624 Py_DECREF(inst);
3625 Py_DECREF(cls);
3626 Py_DECREF(callable);
3627 }
3628
3629 // This is secretly a super-instruction
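        // (it also skips the POP_TOP that must follow it; see the assert below)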
3630 tier1 inst(CALL_LIST_APPEND, (unused/1, unused/2, callable, self, arg -- unused)) {
3631 assert(oparg == 1);
3632 PyInterpreterState *interp = tstate->interp;
3633 DEOPT_IF(callable != interp->callable_cache.list_append);
3634 assert(self != NULL);
3635 DEOPT_IF(!PyList_Check(self));
3636 STAT_INC(CALL, hit);
3637 if (_PyList_AppendTakeRef((PyListObject *)self, arg) < 0) {
3638 goto pop_1_error; // Since arg is DECREF'ed already
3639 }
3640 Py_DECREF(self);
3641 Py_DECREF(callable);
3642 STACK_SHRINK(3);
3643 // Skip POP_TOP
3644 assert(next_instr->op.code == POP_TOP);
3645 SKIP_OVER(1);
3646 DISPATCH();
3647 }
3648
3649 op(_CALL_METHOD_DESCRIPTOR_O, (callable, self_or_null, args[oparg] -- res)) {
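            /* METH_O method descriptors, e.g. ``some_set.add(x)`` */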
3650 int total_args = oparg;
3651 if (self_or_null != NULL) {
3652 args--;
3653 total_args++;
3654 }
3655 PyMethodDescrObject *method = (PyMethodDescrObject *)callable;
3656 DEOPT_IF(total_args != 2);
3657 DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type));
3658 PyMethodDef *meth = method->d_method;
3659 DEOPT_IF(meth->ml_flags != METH_O);
3660 // CPython promises to check all non-vectorcall function calls.
3661 DEOPT_IF(tstate->c_recursion_remaining <= 0);
3662 PyObject *arg = args[1];
3663 PyObject *self = args[0];
3664 DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type));
3665 STAT_INC(CALL, hit);
3666 PyCFunction cfunc = meth->ml_meth;
3667 _Py_EnterRecursiveCallTstateUnchecked(tstate);
3668 res = _PyCFunction_TrampolineCall(cfunc, self, arg);
3669 _Py_LeaveRecursiveCallTstate(tstate);
3670 assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
3671 Py_DECREF(self);
3672 Py_DECREF(arg);
3673 Py_DECREF(callable);
3674 ERROR_IF(res == NULL, error);
3675 }
3676
3677 macro(CALL_METHOD_DESCRIPTOR_O) =
3678 unused/1 +
3679 unused/2 +
3680 _CALL_METHOD_DESCRIPTOR_O +
3681 _CHECK_PERIODIC;
3682
3683 op(_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, (callable, self_or_null, args[oparg] -- res)) {
3684 int total_args = oparg;
3685 if (self_or_null != NULL) {
3686 args--;
3687 total_args++;
3688 }
3689 PyMethodDescrObject *method = (PyMethodDescrObject *)callable;
3690 DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type));
3691 PyMethodDef *meth = method->d_method;
3692 DEOPT_IF(meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS));
3693 PyTypeObject *d_type = method->d_common.d_type;
3694 PyObject *self = args[0];
3695 DEOPT_IF(!Py_IS_TYPE(self, d_type));
3696 STAT_INC(CALL, hit);
3697 int nargs = total_args - 1;
3698 PyCFunctionFastWithKeywords cfunc =
3699 (PyCFunctionFastWithKeywords)(void(*)(void))meth->ml_meth;
3700 res = cfunc(self, args + 1, nargs, NULL);
3701 assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
3702
3703 /* Free the arguments. */
3704 for (int i = 0; i < total_args; i++) {
3705 Py_DECREF(args[i]);
3706 }
3707 Py_DECREF(callable);
3708 ERROR_IF(res == NULL, error);
3709 }
3710
3711 macro(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS) =
3712 unused/1 +
3713 unused/2 +
3714 _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS +
3715 _CHECK_PERIODIC;
3716
3717 op(_CALL_METHOD_DESCRIPTOR_NOARGS, (callable, self_or_null, args[oparg] -- res)) {
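            /* METH_NOARGS method descriptors, e.g. ``some_dict.keys()`` */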
3718 assert(oparg == 0 || oparg == 1);
3719 int total_args = oparg;
3720 if (self_or_null != NULL) {
3721 args--;
3722 total_args++;
3723 }
3724 DEOPT_IF(total_args != 1);
3725 PyMethodDescrObject *method = (PyMethodDescrObject *)callable;
3726 DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type));
3727 PyMethodDef *meth = method->d_method;
3728 PyObject *self = args[0];
3729 DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type));
3730 DEOPT_IF(meth->ml_flags != METH_NOARGS);
3731 // CPython promises to check all non-vectorcall function calls.
3732 DEOPT_IF(tstate->c_recursion_remaining <= 0);
3733 STAT_INC(CALL, hit);
3734 PyCFunction cfunc = meth->ml_meth;
3735 _Py_EnterRecursiveCallTstateUnchecked(tstate);
3736 res = _PyCFunction_TrampolineCall(cfunc, self, NULL);
3737 _Py_LeaveRecursiveCallTstate(tstate);
3738 assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
3739 Py_DECREF(self);
3740 Py_DECREF(callable);
3741 ERROR_IF(res == NULL, error);
3742 }
3743
3744 macro(CALL_METHOD_DESCRIPTOR_NOARGS) =
3745 unused/1 +
3746 unused/2 +
3747 _CALL_METHOD_DESCRIPTOR_NOARGS +
3748 _CHECK_PERIODIC;
3749
3750 op(_CALL_METHOD_DESCRIPTOR_FAST, (callable, self_or_null, args[oparg] -- res)) {
3751 int total_args = oparg;
3752 if (self_or_null != NULL) {
3753 args--;
3754 total_args++;
3755 }
3756 PyMethodDescrObject *method = (PyMethodDescrObject *)callable;
3757 /* Builtin METH_FASTCALL methods, without keywords */
3758 DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type));
3759 PyMethodDef *meth = method->d_method;
3760 DEOPT_IF(meth->ml_flags != METH_FASTCALL);
3761 PyObject *self = args[0];
3762 DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type));
3763 STAT_INC(CALL, hit);
3764 PyCFunctionFast cfunc =
3765 (PyCFunctionFast)(void(*)(void))meth->ml_meth;
3766 int nargs = total_args - 1;
3767 res = cfunc(self, args + 1, nargs);
3768 assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
3769 /* Clear the stack of the arguments. */
3770 for (int i = 0; i < total_args; i++) {
3771 Py_DECREF(args[i]);
3772 }
3773 Py_DECREF(callable);
3774 ERROR_IF(res == NULL, error);
3775 }
3776
3777 macro(CALL_METHOD_DESCRIPTOR_FAST) =
3778 unused/1 +
3779 unused/2 +
3780 _CALL_METHOD_DESCRIPTOR_FAST +
3781 _CHECK_PERIODIC;
3782
3783 inst(INSTRUMENTED_CALL_KW, ( -- )) {
3784 int is_meth = PEEK(oparg + 2) != NULL;
3785 int total_args = oparg + is_meth;
3786 PyObject *function = PEEK(oparg + 3);
3787 PyObject *arg = total_args == 0 ? &_PyInstrumentation_MISSING
3788 : PEEK(total_args + 1);
3789 int err = _Py_call_instrumentation_2args(
3790 tstate, PY_MONITORING_EVENT_CALL,
3791 frame, this_instr, function, arg);
3792 ERROR_IF(err, error);
3793 GO_TO_INSTRUCTION(CALL_KW);
3794 }
3795
3796 inst(CALL_KW, (callable, self_or_null, args[oparg], kwnames -- res)) {
3797 // oparg counts all of the args, but *not* self:
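            // e.g. ``f(a, b, x=1)`` has oparg == 3 and kwnames == ("x",).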
3798 int total_args = oparg;
3799 if (self_or_null != NULL) {
3800 args--;
3801 total_args++;
3802 }
3803 if (self_or_null == NULL && Py_TYPE(callable) == &PyMethod_Type) {
3804 args--;
3805 total_args++;
3806 PyObject *self = ((PyMethodObject *)callable)->im_self;
3807 args[0] = Py_NewRef(self);
3808 PyObject *method = ((PyMethodObject *)callable)->im_func;
3809 args[-1] = Py_NewRef(method);
3810 Py_DECREF(callable);
3811 callable = method;
3812 }
3813 int positional_args = total_args - (int)PyTuple_GET_SIZE(kwnames);
3814 // Check if the call can be inlined or not
3815 if (Py_TYPE(callable) == &PyFunction_Type &&
3816 tstate->interp->eval_frame == NULL &&
3817 ((PyFunctionObject *)callable)->vectorcall == _PyFunction_Vectorcall)
3818 {
3819 int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable))->co_flags;
3820 PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable));
3821 _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit(
3822 tstate, (PyFunctionObject *)callable, locals,
3823 args, positional_args, kwnames
3824 );
3825 Py_DECREF(kwnames);
3826 // Manipulate stack directly since we leave using DISPATCH_INLINED().
3827 STACK_SHRINK(oparg + 3);
3828 // The frame has stolen all the arguments from the stack,
3829 // so there is no need to clean them up.
3830 if (new_frame == NULL) {
3831 ERROR_NO_POP();
3832 }
3833 assert(next_instr - this_instr == 1);
3834 frame->return_offset = 1;
3835 DISPATCH_INLINED(new_frame);
3836 }
3837 /* Callable is not a normal Python function */
3838 res = PyObject_Vectorcall(
3839 callable, args,
3840 positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
3841 kwnames);
3842 if (opcode == INSTRUMENTED_CALL_KW) {
3843 PyObject *arg = total_args == 0 ?
3844 &_PyInstrumentation_MISSING : args[0];
3845 if (res == NULL) {
3846 _Py_call_instrumentation_exc2(
3847 tstate, PY_MONITORING_EVENT_C_RAISE,
3848 frame, this_instr, callable, arg);
3849 }
3850 else {
3851 int err = _Py_call_instrumentation_2args(
3852 tstate, PY_MONITORING_EVENT_C_RETURN,
3853 frame, this_instr, callable, arg);
3854 if (err < 0) {
3855 Py_CLEAR(res);
3856 }
3857 }
3858 }
3859 Py_DECREF(kwnames);
3860 assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
3861 Py_DECREF(callable);
3862 for (int i = 0; i < total_args; i++) {
3863 Py_DECREF(args[i]);
3864 }
3865 ERROR_IF(res == NULL, error);
3866 CHECK_EVAL_BREAKER();
3867 }
3868
3869 inst(INSTRUMENTED_CALL_FUNCTION_EX, ( -- )) {
3870 GO_TO_INSTRUCTION(CALL_FUNCTION_EX);
3871 }
3872
3873 inst(CALL_FUNCTION_EX, (func, unused, callargs, kwargs if (oparg & 1) -- result)) {
3874 // DICT_MERGE is called before this opcode if there are kwargs.
3875 // It converts all dict subtypes in kwargs into regular dicts.
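            // e.g. ``f(*args, **kwargs)`` compiles to this opcode; oparg & 1 is set
            // only when a ``**kwargs`` mapping is present on the stack.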
3876 assert(kwargs == NULL || PyDict_CheckExact(kwargs));
3877 if (!PyTuple_CheckExact(callargs)) {
3878 if (check_args_iterable(tstate, func, callargs) < 0) {
3879 ERROR_NO_POP();
3880 }
3881 PyObject *tuple = PySequence_Tuple(callargs);
3882 if (tuple == NULL) {
3883 ERROR_NO_POP();
3884 }
3885 Py_SETREF(callargs, tuple);
3886 }
3887 assert(PyTuple_CheckExact(callargs));
3888 EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_FUNCTION_EX, func);
3889 if (opcode == INSTRUMENTED_CALL_FUNCTION_EX) {
3890 PyObject *arg = PyTuple_GET_SIZE(callargs) > 0 ?
3891 PyTuple_GET_ITEM(callargs, 0) : &_PyInstrumentation_MISSING;
3892 int err = _Py_call_instrumentation_2args(
3893 tstate, PY_MONITORING_EVENT_CALL,
3894 frame, this_instr, func, arg);
3895 if (err) ERROR_NO_POP();
3896 result = PyObject_Call(func, callargs, kwargs);
3897
3898 if (!PyFunction_Check(func) && !PyMethod_Check(func)) {
3899 if (result == NULL) {
3900 _Py_call_instrumentation_exc2(
3901 tstate, PY_MONITORING_EVENT_C_RAISE,
3902 frame, this_instr, func, arg);
3903 }
3904 else {
3905 int err = _Py_call_instrumentation_2args(
3906 tstate, PY_MONITORING_EVENT_C_RETURN,
3907 frame, this_instr, func, arg);
3908 if (err < 0) {
3909 Py_CLEAR(result);
3910 }
3911 }
3912 }
3913 }
3914 else {
3915 if (Py_TYPE(func) == &PyFunction_Type &&
3916 tstate->interp->eval_frame == NULL &&
3917 ((PyFunctionObject *)func)->vectorcall == _PyFunction_Vectorcall) {
3918 assert(PyTuple_CheckExact(callargs));
3919 Py_ssize_t nargs = PyTuple_GET_SIZE(callargs);
3920 int code_flags = ((PyCodeObject *)PyFunction_GET_CODE(func))->co_flags;
3921 PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(func));
3922
3923 _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex(tstate,
3924 (PyFunctionObject *)func, locals,
3925 nargs, callargs, kwargs);
3926 // Need to manually shrink the stack since we exit with DISPATCH_INLINED.
3927 STACK_SHRINK(oparg + 3);
3928 if (new_frame == NULL) {
3929 ERROR_NO_POP();
3930 }
3931 assert(next_instr - this_instr == 1);
3932 frame->return_offset = 1;
3933 DISPATCH_INLINED(new_frame);
3934 }
3935 result = PyObject_Call(func, callargs, kwargs);
3936 }
3937 DECREF_INPUTS();
3938 assert(PEEK(2 + (oparg & 1)) == NULL);
3939 ERROR_IF(result == NULL, error);
3940 CHECK_EVAL_BREAKER();
3941 }
3942
3943 inst(MAKE_FUNCTION, (codeobj -- func)) {
3944
3945 PyFunctionObject *func_obj = (PyFunctionObject *)
3946 PyFunction_New(codeobj, GLOBALS());
3947
3948 Py_DECREF(codeobj);
3949 if (func_obj == NULL) {
3950 ERROR_NO_POP();
3951 }
3952
3953 _PyFunction_SetVersion(
3954 func_obj, ((PyCodeObject *)codeobj)->co_version);
3955 func = (PyObject *)func_obj;
3956 }
3957
3958 inst(SET_FUNCTION_ATTRIBUTE, (attr, func -- func)) {
3959 assert(PyFunction_Check(func));
3960 PyFunctionObject *func_obj = (PyFunctionObject *)func;
3961 switch(oparg) {
3962 case MAKE_FUNCTION_CLOSURE:
3963 assert(func_obj->func_closure == NULL);
3964 func_obj->func_closure = attr;
3965 break;
3966 case MAKE_FUNCTION_ANNOTATIONS:
3967 assert(func_obj->func_annotations == NULL);
3968 func_obj->func_annotations = attr;
3969 break;
3970 case MAKE_FUNCTION_KWDEFAULTS:
3971 assert(PyDict_CheckExact(attr));
3972 assert(func_obj->func_kwdefaults == NULL);
3973 func_obj->func_kwdefaults = attr;
3974 break;
3975 case MAKE_FUNCTION_DEFAULTS:
3976 assert(PyTuple_CheckExact(attr));
3977 assert(func_obj->func_defaults == NULL);
3978 func_obj->func_defaults = attr;
3979 break;
3980 default:
3981 Py_UNREACHABLE();
3982 }
3983 }
3984
3985 inst(RETURN_GENERATOR, (-- res)) {
3986 assert(PyFunction_Check(frame->f_funcobj));
3987 PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj;
3988 PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func);
3989 if (gen == NULL) {
3990 ERROR_NO_POP();
3991 }
3992 assert(EMPTY());
3993 _PyFrame_SetStackPointer(frame, stack_pointer);
3994 _PyInterpreterFrame *gen_frame = (_PyInterpreterFrame *)gen->gi_iframe;
3995 frame->instr_ptr++;
3996 _PyFrame_Copy(frame, gen_frame);
3997 assert(frame->frame_obj == NULL);
3998 gen->gi_frame_state = FRAME_CREATED;
3999 gen_frame->owner = FRAME_OWNED_BY_GENERATOR;
4000 _Py_LeaveRecursiveCallPy(tstate);
4001 res = (PyObject *)gen;
4002 _PyInterpreterFrame *prev = frame->previous;
4003 _PyThreadState_PopFrame(tstate, frame);
4004 frame = tstate->current_frame = prev;
4005 LOAD_IP(frame->return_offset);
4006 LOAD_SP();
4007 LLTRACE_RESUME_FRAME();
4008 }
4009
4010 inst(BUILD_SLICE, (start, stop, step if (oparg == 3) -- slice)) {
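            // Push slice(start, stop), or slice(start, stop, step) when oparg == 3.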
4011 slice = PySlice_New(start, stop, step);
4012 DECREF_INPUTS();
4013 ERROR_IF(slice == NULL, error);
4014 }
4015
4016 inst(CONVERT_VALUE, (value -- result)) {
4017 conversion_func conv_fn;
4018 assert(oparg >= FVC_STR && oparg <= FVC_ASCII);
4019 conv_fn = _PyEval_ConversionFuncs[oparg];
4020 result = conv_fn(value);
4021 Py_DECREF(value);
4022 ERROR_IF(result == NULL, error);
4023 }
4024
4025 inst(FORMAT_SIMPLE, (value -- res)) {
4026 /* If value is a unicode object, then we know the result
4027 * of format(value) is value itself. */
4028 if (!PyUnicode_CheckExact(value)) {
4029 res = PyObject_Format(value, NULL);
4030 Py_DECREF(value);
4031 ERROR_IF(res == NULL, error);
4032 }
4033 else {
4034 res = value;
4035 }
4036 }
4037
4038 inst(FORMAT_WITH_SPEC, (value, fmt_spec -- res)) {
4039 res = PyObject_Format(value, fmt_spec);
4040 Py_DECREF(value);
4041 Py_DECREF(fmt_spec);
4042 ERROR_IF(res == NULL, error);
4043 }
4044
4045 pure inst(COPY, (bottom, unused[oparg-1] -- bottom, unused[oparg-1], top)) {
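            // Push a new reference to the oparg-th item from the top of the stack
            // (COPY 1 duplicates the top of the stack).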
4046 assert(oparg > 0);
4047 top = Py_NewRef(bottom);
4048 }
4049
4050 specializing op(_SPECIALIZE_BINARY_OP, (counter/1, lhs, rhs -- lhs, rhs)) {
4051 #if ENABLE_SPECIALIZATION
4052 if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
4053 next_instr = this_instr;
4054 _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY);
4055 DISPATCH_SAME_OPARG();
4056 }
4057 STAT_INC(BINARY_OP, deferred);
4058 ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter);
4059 #endif /* ENABLE_SPECIALIZATION */
4060 assert(NB_ADD <= oparg);
4061 assert(oparg <= NB_INPLACE_XOR);
4062 }
4063
4064 op(_BINARY_OP, (lhs, rhs -- res)) {
4065 assert(_PyEval_BinaryOps[oparg]);
4066 res = _PyEval_BinaryOps[oparg](lhs, rhs);
4067 DECREF_INPUTS();
4068 ERROR_IF(res == NULL, error);
4069 }
4070
4071 macro(BINARY_OP) = _SPECIALIZE_BINARY_OP + _BINARY_OP;
4072
4073 pure inst(SWAP, (bottom, unused[oparg-2], top --
4074 top, unused[oparg-2], bottom)) {
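            // Exchange the top of the stack with the oparg-th item from the top
            // (SWAP 2 swaps the two topmost items).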
4075 assert(oparg >= 2);
4076 }
4077
4078 inst(INSTRUMENTED_INSTRUCTION, ( -- )) {
4079 int next_opcode = _Py_call_instrumentation_instruction(
4080 tstate, frame, this_instr);
4081 ERROR_IF(next_opcode < 0, error);
4082 next_instr = this_instr;
4083 if (_PyOpcode_Caches[next_opcode]) {
4084 PAUSE_ADAPTIVE_COUNTER(next_instr[1].counter);
4085 }
4086 assert(next_opcode > 0 && next_opcode < 256);
4087 opcode = next_opcode;
4088 DISPATCH_GOTO();
4089 }
4090
4091 inst(INSTRUMENTED_JUMP_FORWARD, ( -- )) {
4092 INSTRUMENTED_JUMP(this_instr, next_instr + oparg, PY_MONITORING_EVENT_JUMP);
4093 }
4094
4095 inst(INSTRUMENTED_JUMP_BACKWARD, (unused/1 -- )) {
4096 CHECK_EVAL_BREAKER();
4097 INSTRUMENTED_JUMP(this_instr, next_instr - oparg, PY_MONITORING_EVENT_JUMP);
4098 }
4099
4100 inst(INSTRUMENTED_POP_JUMP_IF_TRUE, (unused/1 -- )) {
4101 PyObject *cond = POP();
4102 assert(PyBool_Check(cond));
4103 int flag = Py_IsTrue(cond);
4104 int offset = flag * oparg;
4105 #if ENABLE_SPECIALIZATION
4106 this_instr[1].cache = (this_instr[1].cache << 1) | flag;
4107 #endif
4108 INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
4109 }
4110
4111 inst(INSTRUMENTED_POP_JUMP_IF_FALSE, (unused/1 -- )) {
4112 PyObject *cond = POP();
4113 assert(PyBool_Check(cond));
4114 int flag = Py_IsFalse(cond);
4115 int offset = flag * oparg;
4116 #if ENABLE_SPECIALIZATION
4117 this_instr[1].cache = (this_instr[1].cache << 1) | flag;
4118 #endif
4119 INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
4120 }
4121
4122 inst(INSTRUMENTED_POP_JUMP_IF_NONE, (unused/1 -- )) {
4123 PyObject *value = POP();
4124 int flag = Py_IsNone(value);
4125 int offset;
4126 if (flag) {
4127 offset = oparg;
4128 }
4129 else {
4130 Py_DECREF(value);
4131 offset = 0;
4132 }
4133 #if ENABLE_SPECIALIZATION
4134 this_instr[1].cache = (this_instr[1].cache << 1) | flag;
4135 #endif
4136 INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
4137 }
4138
4139 inst(INSTRUMENTED_POP_JUMP_IF_NOT_NONE, (unused/1 -- )) {
4140 PyObject *value = POP();
4141 int offset;
4142 int nflag = Py_IsNone(value);
4143 if (nflag) {
4144 offset = 0;
4145 }
4146 else {
4147 Py_DECREF(value);
4148 offset = oparg;
4149 }
4150 #if ENABLE_SPECIALIZATION
4151 this_instr[1].cache = (this_instr[1].cache << 1) | !nflag;
4152 #endif
4153 INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
4154 }
4155
4156 tier1 inst(EXTENDED_ARG, ( -- )) {
4157 assert(oparg);
4158 opcode = next_instr->op.code;
4159 oparg = oparg << 8 | next_instr->op.arg;
4160 PRE_DISPATCH_GOTO();
4161 DISPATCH_GOTO();
4162 }
4163
4164 tier1 inst(CACHE, (--)) {
4165 assert(0 && "Executing a cache.");
4166 Py_FatalError("Executing a cache.");
4167 }
4168
4169 tier1 inst(RESERVED, (--)) {
4170 assert(0 && "Executing RESERVED instruction.");
4171 Py_FatalError("Executing RESERVED instruction.");
4172 }
4173
4174 ///////// Tier-2 only opcodes /////////
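        // These micro-ops appear only in tier-2 traces, executed by the uop
        // interpreter or the experimental JIT; they are never emitted as
        // tier-1 bytecode.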
4175
4176 op (_GUARD_IS_TRUE_POP, (flag -- )) {
4177 SYNC_SP();
4178 EXIT_IF(!Py_IsTrue(flag));
4179 assert(Py_IsTrue(flag));
4180 }
4181
4182 op (_GUARD_IS_FALSE_POP, (flag -- )) {
4183 SYNC_SP();
4184 EXIT_IF(!Py_IsFalse(flag));
4185 assert(Py_IsFalse(flag));
4186 }
4187
4188 op (_GUARD_IS_NONE_POP, (val -- )) {
4189 SYNC_SP();
4190 if (!Py_IsNone(val)) {
4191 Py_DECREF(val);
4192 EXIT_IF(1);
4193 }
4194 }
4195
4196 op (_GUARD_IS_NOT_NONE_POP, (val -- )) {
4197 SYNC_SP();
4198 EXIT_IF(Py_IsNone(val));
4199 Py_DECREF(val);
4200 }
4201
4202 op(_JUMP_TO_TOP, (--)) {
4203 #ifndef _Py_JIT
            next_uop = &current_executor->trace[1];
4205 #endif
4206 }
4207
4208 tier2 op(_SET_IP, (instr_ptr/4 --)) {
4209 frame->instr_ptr = (_Py_CODEUNIT *)instr_ptr;
4210 }
4211
4212 tier2 op(_CHECK_STACK_SPACE_OPERAND, (framesize/2 --)) {
4213 assert(framesize <= INT_MAX);
4214 DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, framesize));
4215 DEOPT_IF(tstate->py_recursion_remaining <= 1);
4216 }
4217
4218 op(_SAVE_RETURN_OFFSET, (--)) {
4219 #if TIER_ONE
4220 frame->return_offset = (uint16_t)(next_instr - this_instr);
4221 #endif
4222 #if TIER_TWO
4223 frame->return_offset = oparg;
4224 #endif
4225 }
4226
4227 tier2 op(_EXIT_TRACE, (--)) {
4228 EXIT_TO_TRACE();
4229 }
4230
4231 tier2 op(_CHECK_VALIDITY, (--)) {
4232 DEOPT_IF(!current_executor->vm_data.valid);
4233 }
4234
4235 tier2 pure op(_LOAD_CONST_INLINE, (ptr/4 -- value)) {
4236 value = Py_NewRef(ptr);
4237 }
4238
4239 tier2 pure op(_LOAD_CONST_INLINE_BORROW, (ptr/4 -- value)) {
4240 value = ptr;
4241 }
4242
4243 tier2 pure op (_POP_TOP_LOAD_CONST_INLINE_BORROW, (ptr/4, pop -- value)) {
4244 Py_DECREF(pop);
4245 value = ptr;
4246 }
4247
4248 tier2 pure op(_LOAD_CONST_INLINE_WITH_NULL, (ptr/4 -- value, null)) {
4249 value = Py_NewRef(ptr);
4250 null = NULL;
4251 }
4252
4253 tier2 pure op(_LOAD_CONST_INLINE_BORROW_WITH_NULL, (ptr/4 -- value, null)) {
4254 value = ptr;
4255 null = NULL;
4256 }
4257
4258 tier2 op(_CHECK_FUNCTION, (func_version/2 -- )) {
4259 assert(PyFunction_Check(frame->f_funcobj));
4260 DEOPT_IF(((PyFunctionObject *)frame->f_funcobj)->func_version != func_version);
4261 }
4262
4263 /* Internal -- for testing executors */
4264 op(_INTERNAL_INCREMENT_OPT_COUNTER, (opt --)) {
4265 _PyCounterOptimizerObject *exe = (_PyCounterOptimizerObject *)opt;
4266 exe->count++;
4267 }
4268
4269 /* Only used for handling cold side exits, should never appear in
4270 * a normal trace or as part of an instruction.
4271 */
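        /* If the exit's backoff counter has not yet triggered, execution simply
         * resumes in tier 1 at the exit target.  Once it triggers, an executor is
         * attached to the exit (reusing the target's existing executor, or asking
         * the optimizer to create one) and execution continues in tier 2. */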
4272 tier2 op(_COLD_EXIT, (--)) {
4273 _PyExecutorObject *previous = (_PyExecutorObject *)tstate->previous_executor;
4274 _PyExitData *exit = &previous->exits[oparg];
4275 PyCodeObject *code = _PyFrame_GetCode(frame);
4276 _Py_CODEUNIT *target = _PyCode_CODE(code) + exit->target;
4277 _Py_BackoffCounter temperature = exit->temperature;
4278 if (!backoff_counter_triggers(temperature)) {
4279 exit->temperature = advance_backoff_counter(temperature);
4280 GOTO_TIER_ONE(target);
4281 }
4282 _PyExecutorObject *executor;
4283 if (target->op.code == ENTER_EXECUTOR) {
4284 executor = code->co_executors->executors[target->op.arg];
4285 Py_INCREF(executor);
4286 }
4287 else {
4288 int optimized = _PyOptimizer_Optimize(frame, target, stack_pointer, &executor);
4289 if (optimized <= 0) {
4290 exit->temperature = restart_backoff_counter(temperature);
4291 if (optimized < 0) {
4292 Py_DECREF(previous);
4293 tstate->previous_executor = Py_None;
4294 GOTO_UNWIND();
4295 }
4296 GOTO_TIER_ONE(target);
4297 }
4298 }
4299 /* We need two references. One to store in exit->executor and
4300 * one to keep the executor alive when executing. */
4301 Py_INCREF(executor);
4302 exit->executor = executor;
4303 GOTO_TIER_TWO(executor);
4304 }
4305
4306 tier2 op(_DYNAMIC_EXIT, (--)) {
4307 tstate->previous_executor = (PyObject *)current_executor;
            _PyExitData *exit = (_PyExitData *)&current_executor->exits[oparg];
4309 _Py_CODEUNIT *target = frame->instr_ptr;
4310 _PyExecutorObject *executor;
4311 if (target->op.code == ENTER_EXECUTOR) {
4312 PyCodeObject *code = (PyCodeObject *)frame->f_executable;
4313 executor = code->co_executors->executors[target->op.arg];
4314 Py_INCREF(executor);
4315 }
4316 else {
4317 if (!backoff_counter_triggers(exit->temperature)) {
4318 exit->temperature = advance_backoff_counter(exit->temperature);
4319 GOTO_TIER_ONE(target);
4320 }
4321 int optimized = _PyOptimizer_Optimize(frame, target, stack_pointer, &executor);
4322 if (optimized <= 0) {
4323 exit->temperature = restart_backoff_counter(exit->temperature);
4324 if (optimized < 0) {
4325 GOTO_UNWIND();
4326 }
4327 GOTO_TIER_ONE(target);
4328 }
4329 else {
4330 exit->temperature = initial_temperature_backoff_counter();
4331 }
4332 }
4333 GOTO_TIER_TWO(executor);
4334 }
4335
4336 tier2 op(_START_EXECUTOR, (executor/4 --)) {
4337 Py_DECREF(tstate->previous_executor);
4338 tstate->previous_executor = NULL;
4339 #ifndef _Py_JIT
4340 current_executor = (_PyExecutorObject*)executor;
4341 #endif
4342 DEOPT_IF(!((_PyExecutorObject *)executor)->vm_data.valid);
4343 }
4344
4345 tier2 op(_FATAL_ERROR, (--)) {
4346 assert(0);
4347 Py_FatalError("Fatal error uop executed.");
4348 }
4349
4350 tier2 op(_CHECK_VALIDITY_AND_SET_IP, (instr_ptr/4 --)) {
4351 DEOPT_IF(!current_executor->vm_data.valid);
4352 frame->instr_ptr = (_Py_CODEUNIT *)instr_ptr;
4353 }
4354
4355 tier2 op(_DEOPT, (--)) {
4356 EXIT_TO_TIER1();
4357 }
4358
4359 tier2 op(_ERROR_POP_N, (target/2, unused[oparg] --)) {
4360 frame->instr_ptr = ((_Py_CODEUNIT *)_PyFrame_GetCode(frame)->co_code_adaptive) + target;
4361 SYNC_SP();
4362 GOTO_UNWIND();
4363 }
4364
4365 /* Progress is guaranteed if we DEOPT on the eval breaker, because
4366 * ENTER_EXECUTOR will not re-enter tier 2 with the eval breaker set. */
4367 tier2 op(_TIER2_RESUME_CHECK, (--)) {
4368 #if defined(__EMSCRIPTEN__)
4369 DEOPT_IF(_Py_emscripten_signal_clock == 0);
4370 _Py_emscripten_signal_clock -= Py_EMSCRIPTEN_SIGNAL_HANDLING;
4371 #endif
4372 uintptr_t eval_breaker = _Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker);
4373 DEOPT_IF(eval_breaker & _PY_EVAL_EVENTS_MASK);
4374 assert(tstate->tracing || eval_breaker == FT_ATOMIC_LOAD_UINTPTR_ACQUIRE(_PyFrame_GetCode(frame)->_co_instrumentation_version));
4375 }
4376
4377 // END BYTECODES //
4378
4379 }
4380 dispatch_opcode:
4381 error:
4382 exception_unwind:
4383 exit_unwind:
4384 handle_eval_breaker:
4385 resume_frame:
4386 resume_with_error:
4387 start_frame:
4388 unbound_local_error:
4389 ;
4390 }
4391
4392 // Future families go below this point //
4393