"""Generate the main interpreter switch.
Reads the instruction definitions from bytecodes.c.
Writes the cases to generated_cases.c.h, which is #included in ceval.c.
"""

import argparse
import os.path
import sys

from analyzer import (
    Analysis,
    Instruction,
    Uop,
    Part,
    analyze_files,
    Skip,
    StackItem,
    analysis_error,
)
from generators_common import (
    DEFAULT_INPUT,
    ROOT,
    write_header,
    emit_tokens,
)
from cwriter import CWriter
from typing import TextIO, Iterator
from lexer import Token
from stack import StackOffset, Stack, SizeMismatch


DEFAULT_OUTPUT = ROOT / "Python/generated_cases.c.h"


FOOTER = "#undef TIER_ONE\n"


def _declare_one(var: StackItem, variables: set[str], out: CWriter) -> None:
    """Emit a C declaration for *var*, unless that name is already declared.

    Conditional stack items are NULL-initialized so the generated code can
    read them unconditionally; unconditional items are left uninitialized.
    """
    if var.name in variables:
        return
    variables.add(var.name)
    # var.type already carries any trailing space/star (e.g. "PyObject *").
    c_type = var.type if var.type else "PyObject *"
    if var.condition:
        out.emit(f"{c_type}{var.name} = NULL;\n")
    else:
        out.emit(f"{c_type}{var.name};\n")


def declare_variables(inst: Instruction, out: CWriter) -> None:
    """Declare a C local for every distinct stack input/output of *inst*.

    "unused" is pre-seeded so placeholder stack slots never get declared.
    Inputs are walked in reverse (pop order) to match emission order below.
    """
    variables = {"unused"}
    for uop in inst.parts:
        if isinstance(uop, Uop):
            for var in reversed(uop.stack.inputs):
                _declare_one(var, variables, out)
            for var in uop.stack.outputs:
                _declare_one(var, variables, out)


def write_uop(
    uop: Part, out: CWriter, offset: int, stack: Stack, inst: Instruction, braces: bool
) -> int:
    """Write the C code for one part (uop or skip) of instruction *inst*.

    *offset* is the current cache-entry offset within the instruction;
    the updated offset is returned.  *braces* wraps the uop body in a
    block (used when an instruction is composed of several uops).
    Raises an analysis error (attributed to the uop body) on a stack
    size mismatch in the definition.
    """
    if isinstance(uop, Skip):
        entries = "entries" if uop.size > 1 else "entry"
        out.emit(f"/* Skip {uop.size} cache {entries} */\n")
        return offset + uop.size
    try:
        out.start_line()
        if braces:
            out.emit(f"// {uop.name}\n")
        for var in reversed(uop.stack.inputs):
            out.emit(stack.pop(var))
        if braces:
            out.emit("{\n")
        if not uop.properties.stores_sp:
            for var in uop.stack.outputs:
                out.emit(stack.push(var))
        for cache in uop.caches:
            if cache.name != "unused":
                if cache.size == 4:
                    c_type = "PyObject *"
                    reader = "read_obj"
                else:
                    c_type = f"uint{cache.size * 16}_t "
                    reader = f"read_u{cache.size * 16}"
                out.emit(
                    f"{c_type}{cache.name} = {reader}(&this_instr[{offset}].cache);\n"
                )
                # Specializing family members use the cache; a lone
                # instruction may not, so silence unused-variable warnings.
                if inst.family is None:
                    out.emit(f"(void){cache.name};\n")
            offset += cache.size
        emit_tokens(out, uop, stack, inst)
        if uop.properties.stores_sp:
            # The uop body adjusted the stack pointer itself; record the
            # pushes only after its code has run.
            for var in uop.stack.outputs:
                out.emit(stack.push(var))
        if braces:
            out.start_line()
            out.emit("}\n")
        return offset
    except SizeMismatch as ex:
        raise analysis_error(ex.args[0], uop.body[0])


def uses_this(inst: Instruction) -> bool:
    """Return True if the generated code for *inst* needs `this_instr`."""
    if inst.properties.needs_this:
        return True
    for uop in inst.parts:
        if isinstance(uop, Skip):
            continue
        for cache in uop.caches:
            if cache.name != "unused":
                return True
    return False


def generate_tier1(
    filenames: list[str], analysis: Analysis, outfile: TextIO, lines: bool
) -> None:
    """Write the tier-1 interpreter cases for *analysis* to *outfile*.

    *filenames* is recorded in the generated header comment; *lines*
    enables `#line` directives in the output.
    """
    write_header(__file__, filenames, outfile)
    outfile.write(
        """
#ifdef TIER_TWO
    #error "This file is for Tier 1 only"
#endif
#define TIER_ONE 1
"""
    )
    out = CWriter(outfile, 2, lines)
    out.emit("\n")
    for name, inst in sorted(analysis.instructions.items()):
        needs_this = uses_this(inst)
        out.emit("\n")
        out.emit(f"TARGET({name}) {{\n")
        unused_guard = "(void)this_instr;\n" if inst.family is None else ""
        if needs_this and not inst.is_target:
            out.emit("_Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;\n")
            out.emit(unused_guard)
        else:
            out.emit("frame->instr_ptr = next_instr;\n")
        out.emit(f"next_instr += {inst.size};\n")
        out.emit(f"INSTRUCTION_STATS({name});\n")
        if inst.is_target:
            out.emit(f"PREDICTED({name});\n")
            # A jump target cannot hoist this_instr before PREDICTED(),
            # so recompute it from next_instr here instead.
            if needs_this:
                out.emit(f"_Py_CODEUNIT *this_instr = next_instr - {inst.size};\n")
                out.emit(unused_guard)
        if inst.family is not None:
            out.emit(
                f"static_assert({inst.family.size} == {inst.size-1}"
                ', "incorrect cache size");\n'
            )
        declare_variables(inst, out)
        offset = 1  # The instruction itself
        stack = Stack()
        # Only emit braces if more than one uop (loop-invariant, so
        # compute it once rather than per part).
        insert_braces = len([p for p in inst.parts if isinstance(p, Uop)]) > 1
        for part in inst.parts:
            offset = write_uop(part, out, offset, stack, inst, insert_braces)
        out.start_line()
        if not inst.parts[-1].properties.always_exits:
            stack.flush(out)
            if inst.parts[-1].properties.ends_with_eval_breaker:
                out.emit("CHECK_EVAL_BREAKER();\n")
            out.emit("DISPATCH();\n")
        out.start_line()
        out.emit("}")
        out.emit("\n")
    outfile.write(FOOTER)


arg_parser = argparse.ArgumentParser(
    description="Generate the code for the interpreter switch.",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)

arg_parser.add_argument(
    "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT
)

arg_parser.add_argument(
    "-l", "--emit-line-directives", help="Emit #line directives", action="store_true"
)

arg_parser.add_argument(
    "input", nargs=argparse.REMAINDER, help="Instruction definition file(s)"
)


def generate_tier1_from_files(
    filenames: list[str], outfilename: str, lines: bool
) -> None:
    """Analyze *filenames* and write the tier-1 cases to *outfilename*."""
    data = analyze_files(filenames)
    with open(outfilename, "w") as outfile:
        generate_tier1(filenames, data, outfile, lines)


if __name__ == "__main__":
    args = arg_parser.parse_args()
    if len(args.input) == 0:
        args.input.append(DEFAULT_INPUT)
    data = analyze_files(args.input)
    with open(args.output, "w") as outfile:
        generate_tier1(args.input, data, outfile, args.emit_line_directives)