1 /**
2 * Copyright 2024 Huawei Technologies Co., Ltd
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 #include "pipeline/jit/pi/graph_capture/code_generator.h"
17 #include <set>
18 #include <regex>
19 #include "pipeline/jit/pi/graph_capture/local_liveness.h"
20 #include "pipeline/jit/pi/graph_capture/graph.h"
21 #include "pipeline/jit/pi/graph_capture/cfg.h"
22 #include "pipeline/jit/pi/graph_capture/side_effect.h"
23 #include "pipeline/jit/pi/utils/utils.h"
24 #include "pipeline/jit/pi/common.h"
25 #include "pipeline/jit/pi/external.h"
26 #include "pipeline/jit/pi/graph_compiler/compiler.h"
27
28 #ifndef PY_MAKECODEUNIT
29 #ifdef WORDS_BIGENDIAN
30 #define PY_MAKECODEUNIT(opcode, oparg) (MS_ASSERT((opcode) < NO_IMPL_OPCODE), ((opcode) << 8) | (oparg))
31 #else
32 #define PY_MAKECODEUNIT(opcode, oparg) (MS_ASSERT((opcode) < NO_IMPL_OPCODE), (opcode) | ((oparg) << 8))
33 #endif
34 #endif
35
36 namespace mindspore {
37 namespace pijit {
38 constexpr const size_t MoveEightBits = 8;
39
40 class GraphParameterBuilder {
41 public:
42 static std::string Key(int, ValueNode *n);
43
44 void Init(const std::vector<ValueNode *> &args, const std::vector<ValueNode *> &globals, ValueNode *vargs,
45 ValueNode *kwargs);
46 void Build(const std::unordered_map<ValueNode *, int> &locals);
47
48 std::vector<ValueNode *> args_;
49 std::vector<ValueNode *> globals_;
50 std::vector<std::unique_ptr<Instr>> load_; // load parameters and store parameters to global, for caller
51 std::vector<std::unique_ptr<Instr>> dele_; // delete global parameters, for caller
52 std::vector<std::unique_ptr<Instr>> sort_; // load global parameter and store to locals, for callee
53 ValueNode *vargs_;
54 ValueNode *kwargs_;
55
56 private:
57 void BuildVargs(const std::unordered_map<ValueNode *, int> &locals);
58 void BuildKwVargs(const std::unordered_map<ValueNode *, int> &locals);
59 };
60
61 static bool FindBlock(int start_bci, const CFG *cfg, int *target_bci, int *stack_effect);
62 std::string PrintInstr(const std::vector<std::unique_ptr<Instr>> &list);
63 std::string PrintNodeSet(const NodeSet &);
64
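// Builds a readable, unique key for a Python object: type name, the qualified name for
// functions, the module name for modules, plus the object's pointer value. Used when the
// object has to be stashed in the generated code's globals dict.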
65 std::string GenerateObjectKey(const py::object &value) {
66 PyTypeObject *tp = Py_TYPE(value.ptr());
67 std::stringstream s;
68 s << (tp->tp_name ? tp->tp_name : "<unnamed>");
69 if (tp == &PyFunction_Type) {
70 s << "[" << PyUnicode_AsUTF8(reinterpret_cast<PyFunctionObject *>(value.ptr())->func_qualname) << "]";
71 }
72 if (tp == &PyModule_Type) {
73 s << "[" << PyModule_GetName(value.ptr()) << "]";
74 }
75 s << "<" << value.ptr() << ">";
76 return s.str();
77 }
78
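// Adds (key, value) to the dict. If the key already maps to a different object and `rename`
// is provided, a fresh key is generated from the value and returned through `rename`;
// otherwise a duplicate mapping is an internal error.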
79 void MapAdd(const py::dict &dict, const std::string &key, const py::object &value, std::string *rename) {
80 py::str key_object(key);
81 PyObject *old = PyDict_GetItem(dict.ptr(), key_object.ptr());
82 if (old == value.ptr()) {
83 return;
84 }
85 if (old == nullptr) {
86 PyDict_SetItem(dict.ptr(), key_object.ptr(), value.ptr());
87 return;
88 }
89 if (rename != nullptr) {
90 std::string new_key = GenerateObjectKey(value);
91 if (new_key != key) {
92 PyDict_SetItem(dict.ptr(), py::str(new_key).ptr(), value.ptr());
93 *rename = new_key;
94 return;
95 }
96 }
97 MS_LOG(INTERNAL_EXCEPTION) << "duplicate dict value, key: " << key << ", old value at " << old << ": "
98 << std::string(py::str(old)) << " -> new value at " << value.ptr() << ": "
99 << std::string(py::str(value.ptr()));
100 }
101
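// Wraps PyCompile_OpcodeStackEffect: NOP and EXTENDED_ARG contribute nothing and END_FINALLY
// is treated as popping one value (the `jump` flag is currently unused).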
102 static int GetOpcodeMaxStackEffect(int op, int arg, bool jump) {
103 int off;
104 off = PyCompile_OpcodeStackEffect(op, arg);
105 if (op == NOP || op == EXTENDED_ARG) {
106 return 0;
107 }
108 if (op == END_FINALLY) {
109 return -1;
110 }
111 return off;
112 }
113
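// Simulates the stack effect of every instruction, following both fall-through and jump edges,
// and returns the maximum stack depth reached, or -1 when the final depth is negative.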
114 int CodeGenerator::CalculateStackSize(const std::vector<std::unique_ptr<Instr>> &list, int sp) {
115 std::unordered_map<Instr *, int> blocks;
116 int max_depth = 0;
117 int flag = 0;
118 for (size_t i = 0; i < list.size(); ++i) {
119 Instr *instr = list[i].get();
120 int op = instr->op();
121 int arg = instr->arg();
122 Instr *jump = instr->extra_jump();
123 auto iter = blocks.find(instr);
124 if (iter != blocks.end()) {
125 flag = 0;
126 sp = iter->second;
127 } else if (flag == 1) {
128 continue;
129 }
130 if (op == RAISE_VARARGS || op == RETURN_VALUE || op == RERAISE) {
131 flag = 1;
132 }
133 if (jump != nullptr) {
134 iter = blocks.find(jump);
135 int jump_sp = sp + GetOpcodeMaxStackEffect(op, arg, true);
136 blocks[jump] = (iter == blocks.end()) ? jump_sp : std::max(iter->second, jump_sp);
137 }
138 sp += GetOpcodeMaxStackEffect(op, arg, false);
139 max_depth = std::max(sp, max_depth);
140 }
141 return sp < 0 ? -1 : max_depth;
142 }
143
144 // reset bci, reset jump offset
145 static void CalculateOffset(const std::vector<std::unique_ptr<Instr>> &list) {
146 constexpr auto InstrSize = [](unsigned arg) constexpr {
147 return arg <= 0xff ? 1 : arg <= 0xffff ? 2 : arg <= 0xffffff ? 3 : 4;
148 };
149
150 bool re_calc;
151 do {
152 re_calc = false;
153 int bci = -1;
154 for (const auto &i : list) {
155 bci += InstrSize(i->arg());
156 i->set_bci(bci);
157 }
158 for (const auto &i : list) {
159 int isize = InstrSize(i->arg());
160 Instr *tar = i->extra_jump();
161 if (tar) {
162 i->set_arg(Opcode(i->op()).JumpOffset(i->bci(), tar->bci() - InstrSize(tar->arg()) + 1));
163 re_calc |= isize != InstrSize(i->arg());
164 }
165 }
166 } while (re_calc);
167 }
168
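// Serializes the instruction list into (co_code, co_lnotab) bytes: offsets are recomputed first,
// EXTENDED_ARG units are emitted for opargs wider than one byte, and line-number deltas are
// appended to the lnotab table.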
169 std::pair<py::bytes, py::bytes> CodeGenerator::ConvertToCodeBytes(const std::vector<std::unique_ptr<Instr>> &list,
170 int first_line) {
171 std::vector<char> co_lnotab;
172 std::vector<_Py_CODEUNIT> co_code;
173
174 CalculateOffset(list);
175
176 int line = first_line > 0 ? first_line : 0;
177 int bci = 0;
178 for (const auto &i : list) {
179 int addr_off = sizeof(_Py_CODEUNIT) * (i->bci() - bci);
180 int line_off = i->line() - line;
181 if (i->line() != -1 && line_off > 0 && line_off < INT8_MAX && addr_off < INT8_MAX) {
182 co_lnotab.push_back(addr_off);
183 co_lnotab.push_back(line_off);
184 bci = i->bci();
185 line = i->line();
186 }
187 int oparg = i->arg();
188 for (unsigned c = 0, exa = IntToSize(oparg) >> MoveEightBits; exa > 0; exa >>= MoveEightBits, ++c) {
189 co_code.insert(co_code.end() - c, PY_MAKECODEUNIT(EXTENDED_ARG, exa & 0xff));
190 }
191 co_code.push_back(PY_MAKECODEUNIT(i->op(), (signed)oparg & 0xff));
192 }
193 const char *code_data = reinterpret_cast<const char *>(co_code.data());
194 const size_t code_size = co_code.size() * sizeof(co_code[0]);
195 return {py::bytes(code_data, code_size), py::bytes(co_lnotab.data(), co_lnotab.size())};
196 }
197
198 static void SetNamedInstrIndex(const std::unique_ptr<Instr> &i, std::unordered_map<std::string, int> *co_names) {
199 if (!Opcode(i->op()).HasName()) {
200 return;
201 }
202 int arg;
203 auto iter = co_names->find(i->name());
204 if (iter != co_names->end()) {
205 arg = iter->second;
206 } else {
207 arg = SizeToInt(co_names->size());
208 co_names->insert({i->name(), arg});
209 }
210 i->set_arg(arg);
211 }
212
213 static void SetLoadConstIndex(const std::unique_ptr<Instr> &i, const py::dict &consts) {
214 if (i->op() != LOAD_CONST) {
215 return;
216 }
217 PyObject *co_consts = consts.ptr();
218 PyObject *cnst = i->cnst().ptr();
219 MS_EXCEPTION_IF_CHECK_FAIL(cnst != nullptr, "LOAD_CONST instruction has no constant object set");
220
221 PyObject *key = _PyCode_ConstantKey(cnst);
222 if (key != nullptr) {
223 PyObject *index = PyDict_GetItem(co_consts, key);
224 Py_ssize_t arg;
225 if (index != nullptr) {
226 arg = PyLong_AsLong(index);
227 } else {
228 arg = PyDict_GET_SIZE(co_consts);
229 PyDict_SetItem(co_consts, key, py::int_(arg).ptr());
230 }
231 i->set_arg(arg);
232 Py_DECREF(key);
233 if (!PyErr_Occurred()) {
234 return;
235 }
236 }
237 throw py::error_already_set();
238 }
239
240 static py::tuple ConstsMapToTuple(const py::dict &consts) {
241 const Py_ssize_t size = PyDict_GET_SIZE(consts.ptr());
242 py::tuple co_consts(size);
243
244 PyObject *key;
245 PyObject *val;
246 Py_ssize_t pos = 0;
247 while (PyDict_Next(consts.ptr(), &pos, &key, &val)) {
248 Py_ssize_t index = PyLong_AsLong(val);
249 if (PyTuple_CheckExact(key)) {
250 key = PyTuple_GET_ITEM(key, 1);
251 }
252 Py_INCREF(key);
253 PyTuple_SET_ITEM(co_consts.ptr(), index, key);
254 }
255 return co_consts;
256 }
257
258 static py::tuple NamesMapToTuple(const std::unordered_map<std::string, int> &names) {
259 py::tuple co_names(names.size());
260 for (const auto &i : names) {
261 PyTuple_SET_ITEM(co_names.ptr(), i.second, PyUnicode_FromStringAndSize(i.first.data(), i.first.size()));
262 }
263 return co_names;
264 }
265
266 static py::object ConvertVector(const std::vector<std::string> &names, bool to_tuple = true) {
267 size_t size = names.size();
268 PyObject *list = to_tuple ? PyTuple_New(size) : PyList_New(size);
269 for (; size > 0; --size) {
270 const std::string &n = names[size - 1];
271 if (to_tuple) {
272 PyTuple_SET_ITEM(list, size - 1, PyUnicode_FromStringAndSize(n.data(), n.size()));
273 } else {
274 PyList_SET_ITEM(list, size - 1, PyUnicode_FromStringAndSize(n.data(), n.size()));
275 }
276 }
277 return py::reinterpret_steal<py::object>(list);
278 }
279
280 static py::tuple FillVariableName(const std::vector<std::string> &varnames, int nlocals) {
281 MS_EXCEPTION_IF_CHECK_FAIL(varnames.size() <= static_cast<size_t>(nlocals), "local count is too small");
282 std::set<std::string> vars;
283 py::tuple co_varnames(nlocals);
284 int size = SizeToInt(varnames.size());
285 for (int i = 0; i < nlocals; ++i) {
286 std::string n;
287 if (i < size) {
288 n = varnames[i];
289 } else {
290 n = std::to_string(i) + "_local";
291 }
292 while (vars.find(n) != vars.end()) {
293 n = n + "_" + std::to_string(i);
294 }
295 vars.insert(n);
296 PyTuple_SET_ITEM(co_varnames.ptr(), i, PyUnicode_FromStringAndSize(n.data(), n.size()));
297 }
298 return co_varnames;
299 }
300
301 static std::string AttachCodeID(const std::string &co_name) {
302 static size_t id = 0;
303 constexpr const char *mark = "I.";
304 constexpr const char *reg_mark = "\\d+I.";
305 return std::to_string(id++) + mark + std::regex_replace(co_name, std::regex(reg_mark), "");
306 }
307
308 static std::string MakeCompiledName(const std::string &co_name) {
309 static size_t id = 0;
310 constexpr const char *reg_mark = "<compile\\[\\d+\\]>";
311 return "<compile[" + std::to_string(id++) + "]>" + std::regex_replace(co_name, std::regex(reg_mark), "");
312 }
313
314 static std::string MakeBrkName(const std::string &co_name, int bci) {
315 constexpr const char *mark = "B.";
316 constexpr const char *reg_mark = "\\d+B.";
317 return std::to_string(bci) + mark + std::regex_replace(co_name, std::regex(reg_mark), "");
318 }
319
320 py::object CodeGenerator::Transform(const Code &ccode) {
321 std::unordered_map<std::string, int> names;
322 py::dict consts;
323 int co_stacksize;
324
325 for (const auto &i : ccode.co_code) {
326 SetNamedInstrIndex(i, &names);
327 SetLoadConstIndex(i, consts);
328 }
329 co_stacksize = CalculateStackSize(ccode.co_code);
330 if (co_stacksize < 0) {
331 MS_LOG(ERROR) << "\n" << PrintInstr(ccode.co_code);
332 MS_EXCEPTION_IF_CHECK_FAIL(co_stacksize >= 0, "check instruction list, compute stack size failed");
333 }
334
335 std::pair<py::bytes, py::bytes> code_info = ConvertToCodeBytes(ccode.co_code, ccode.co_firstlineno);
336 py::bytes co_code = std::move(code_info.first);
337 py::bytes co_lnotab = std::move(code_info.second);
338 py::tuple co_consts = ConstsMapToTuple(consts);
339 py::tuple co_names = NamesMapToTuple(names);
340 py::object co_varnames = FillVariableName(ccode.co_varnames, ccode.co_nlocals);
341 py::object co_freevars = ConvertVector(ccode.co_freevars);
342 py::object co_cellvars = ConvertVector(ccode.co_cellvars);
343 py::str co_name(AttachCodeID(ccode.co_name));
344
345 PyCodeObject *new_code = PyCode_New(ccode.co_argcount, // co_argcount
346 ccode.co_kwonlyargcount, // co_kwonlyargcount
347 ccode.co_nlocals, // co_nlocals
348 co_stacksize, // co_stacksize
349 ccode.co_flags, // co_flags
350 co_code.ptr(), // co_code
351 co_consts.ptr(), // co_consts
352 co_names.ptr(), // co_names
353 co_varnames.ptr(), // co_varnames
354 co_freevars.ptr(), // co_freevars
355 co_cellvars.ptr(), // co_cellvars
356 ccode.co_filename.ptr(), // co_filename
357 co_name.ptr(), // co_name
358 ccode.co_firstlineno, // co_firstlineno
359 co_lnotab.ptr()); // co_lnotab
360
361 if (new_code != nullptr) {
362 return py::reinterpret_steal<py::object>(reinterpret_cast<PyObject *>(new_code));
363 }
364 throw py::error_already_set();
365 }
366
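// Copies the instructions in [start_bci, end_bci), rewriting LOAD_METHOD/CALL_METHOD to
// LOAD_ATTR/CALL_FUNCTION and remapping jump targets; a trailing NOP is appended when a jump
// targets the end of the copied range.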
367 std::vector<std::unique_ptr<Instr>> CodeGenerator::CopyInstr(const std::vector<std::unique_ptr<Instr>> &list,
368 size_t start_bci, size_t end_bci) {
369 std::vector<std::pair<size_t, size_t>> edges;
370 std::vector<std::unique_ptr<Instr>> instrs;
371
372 bool insert_nop_to_end = false;
373 size_t size = std::min(list.size(), end_bci);
374 for (size_t bci = start_bci; bci < size; ++bci) {
375 const auto &i = list[bci];
376 size_t index = (size_t)i->bci() - start_bci;
377 instrs.emplace_back(std::make_unique<Instr>(i->op(), i->arg(), index, i->line()));
378 instrs.back()->set_name(i->name());
379 instrs.back()->set_cnst(i->cnst());
380 if (i->op() == LOAD_METHOD) {
381 instrs.back()->set_op(LOAD_ATTR);
382 } else if (i->op() == CALL_METHOD) {
383 instrs.back()->set_op(CALL_FUNCTION);
384 }
385 if (i->extra_jump()) {
386 size_t tar = i->extra_jump()->bci();
387 bool valid = i->bci() == SizeToInt(bci) && start_bci <= tar && tar <= size;
388 if (!valid) {
389 MS_LOG(INTERNAL_EXCEPTION) << "check instruction index failed," << i->bci() << " == " << bci << " && "
390 << start_bci << " <= " << tar << " && " << tar << " <= " << size;
391 }
392 insert_nop_to_end |= (tar == size);
393 edges.push_back({index, tar - start_bci});
394 }
395 }
396 if (insert_nop_to_end) {
397 instrs.emplace_back(std::make_unique<Instr>(NOP, 0, instrs.size()));
398 }
399 for (const auto &i : edges) {
400 instrs[i.first]->set_extra_jump(instrs[i.second].get());
401 }
402 return instrs;
403 }
404
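// Removes NOP, EXTENDED_ARG, and jumps that only go to the next instruction, then re-targets
// any jump that pointed at a removed instruction.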
405 void CodeGenerator::EraseUnusedInstr(std::vector<std::unique_ptr<Instr>> *list) {
406 auto NeedRemove = [](const std::vector<std::unique_ptr<Instr>>::iterator &i) {
407 int op = (*i)->op();
408 if (op == NOP || op == EXTENDED_ARG) {
409 return true;
410 }
411 if (op == JUMP_ABSOLUTE || op == JUMP_FORWARD) { // jump to next
412 return (*i)->extra_jump() == (i + 1)->get();
413 }
414 return false;
415 };
416 // mark unused instruction
417 auto erase_iter = list->begin();
418 int bci = 0;
419 for (auto i = list->begin(); i != list->end(); ++i) {
420 if (NeedRemove(i)) {
421 (*i)->set_bci(-1);
422 (*i)->set_extra_jump((i + 1)->get());
423 } else {
424 (*i)->set_bci(bci);
425 std::swap(*erase_iter, *i);
426 ++erase_iter;
427 ++bci;
428 }
429 }
430 if (erase_iter == list->end()) {
431 return;
432 }
433 // reset jump
434 for (auto i = list->begin(); i != erase_iter; ++i) {
435 Instr *tar = (*i)->extra_jump();
436 if (tar == nullptr) {
437 continue;
438 }
439 while (tar->bci() == -1) {
440 MS_EXCEPTION_IF_NULL(tar->extra_jump());
441 tar = tar->extra_jump();
442 }
443 (*i)->set_extra_jump(tar);
444 }
445 list->erase(erase_iter, list->end());
446 }
447
448 void CodeGenerator::EraseUnusedInstr() { EraseUnusedInstr(&code_.co_code); }
449
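// Emits the instructions that rotate the value on top of the stack below the `stack` values
// beneath it: ROT_N on Python 3.10, ROT_TWO/ROT_THREE/ROT_FOUR or a tuple pack/unpack
// round-trip on Python 3.7-3.9.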
450 std::vector<std::unique_ptr<Instr>> CodeGenerator::RotStack(int stack) {
451 std::vector<std::unique_ptr<Instr>> res;
452 if (stack == 0) {
453 return res;
454 #if (PY_MAJOR_VERSION == 3) && (PY_MINOR_VERSION == 10)
455 } else {
456 res.push_back(std::make_unique<Instr>(ROT_N, stack + 1));
457 #elif PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION < 10 && PY_MINOR_VERSION >= 7
458 } else if (stack == 1) {
459 res.push_back(std::make_unique<Instr>(ROT_TWO));
460 } else if (stack == 2) {
461 res.push_back(std::make_unique<Instr>(ROT_THREE));
462 #if (PY_MAJOR_VERSION == 3) && (PY_MINOR_VERSION > 7)
463 } else if (stack == 3) {
464 res.push_back(std::make_unique<Instr>(ROT_FOUR));
465 #endif
466 } else {
467 MS_LOG(DEBUG) << ("too many stack values, will build a tuple to process them\n");
468 res.insert(res.begin(), std::make_unique<Instr>(BUILD_TUPLE, stack));
469 res.insert(res.begin(), std::make_unique<Instr>(UNPACK_SEQUENCE, stack));
470 res.insert(res.begin(), std::make_unique<Instr>(BUILD_TUPLE, stack)); // reverse tuple
471 res.push_back(std::make_unique<Instr>(ROT_TWO));
472 res.push_back(std::make_unique<Instr>(UNPACK_SEQUENCE, stack));
473 #endif
474 }
475
476 return res;
477 }
478
479 std::string CodeGenerator::PrintAlive() const {
480 std::stringstream s;
481 std::unordered_map<int, std::vector<ValueNode *>> sorted;
482 for (const auto &i : nodes_alive_) {
483 sorted[i.second].push_back(i.first);
484 }
485 for (const auto &i : sorted) {
486 s << i.first << ": ";
487 for (const auto &node : i.second) {
488 s << node << " ";
489 }
490 s << "\n";
491 }
492 return s.str();
493 }
494
495 /**
496 * traverse all values in reverse order and set the alive time for each input value;
497 * the inputs of a value always appear before that value
498 */
499 void CodeGenerator::MarkAlive() {
500 for (auto i : nodes_->outputs) {
501 MarkAlive(i);
502 }
503 for (int index = nodes_->operations.size() - 1; index >= 0; --index) {
504 ValueNode *node = nodes_->operations[index];
505 for (auto input : node->getInputs()) {
506 int cur = nodes_alive_[input];
507 nodes_alive_[input] = std::max(cur, index);
508 }
509 }
510 }
511
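// Returns the local slot assigned to `node`, reusing the slot of a value that is no longer
// alive after `index` when possible, otherwise allocating the smallest free slot.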
512 int CodeGenerator::AllocLocal(ValueNode *node, int index) {
513 auto iter = locals_map_.find(node);
514 if (iter != locals_map_.end()) {
515 return iter->second;
516 }
517 int res;
518 std::set<int> used_slots; // order set
519 for (iter = locals_map_.begin(); iter != locals_map_.end(); ++iter) {
520 if (index != INT_MAX && nodes_alive_[iter->first] <= index) {
521 res = iter->second;
522 locals_map_.erase(iter);
523 locals_map_.insert({node, res});
524 return res;
525 }
526 used_slots.insert(iter->second);
527 }
528 res = 0;
529 for (auto i = used_slots.begin(); i != used_slots.end() && res == (*i); ++i, ++res) {
530 }
531 locals_map_.insert({node, res});
532 SetLocalsCount(res);
533 return res;
534 }
535
536 void CodeGenerator::NewInstr(int op, int arg, int line) {
537 code_.co_code.emplace_back(std::make_unique<Instr>(op, arg, -1, line));
538 }
539
540 void CodeGenerator::AddInstr(std::unique_ptr<Instr> &&instr) { code_.co_code.emplace_back(std::move(instr)); }
541
542 void CodeGenerator::AddInstrs(std::vector<std::unique_ptr<Instr>> &&l) {
543 code_.co_code.insert(code_.co_code.end(), std::make_move_iterator(l.begin()), std::make_move_iterator(l.end()));
544 }
545
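// Emits the instruction(s) that push `node` onto the stack: LOAD_FAST for allocated locals,
// LOAD_CLOSURE/LOAD_DEREF for cell and free variables, LOAD_GLOBAL (copying the value into the
// new globals dict if it comes from another globals), or LOAD_CONST for constants.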
546 void CodeGenerator::LoadValue(ValueNode *node) {
547 auto iter = locals_map_.find(node);
548 if (iter != locals_map_.end()) {
549 NewInstr(LOAD_FAST, iter->second);
550 return;
551 }
552 if (node->GetType() == ValueNode::CellVar || node->GetType() == ValueNode::FreeVar) {
553 int index = static_cast<CellVarNode *>(node)->GetIndex();
554 if (index < 0) {
555 LoadConst(py::reinterpret_steal<py::object>(PyCell_New(nullptr)));
556 } else {
557 NewInstr(LOAD_CLOSURE, index);
558 }
559 return;
560 }
561 int opcode = node->GetOpcode();
562 if (opcode == LOAD_DEREF) {
563 NewInstr(opcode, node->GetOparg());
564 return;
565 }
566 std::string key = node->GetName();
567 if (opcode == LOAD_GLOBAL) {
568 PyObject *globals = node->GetGraph() ? node->GetGraph()->GetGlobals().ptr() : nullptr;
569 MS_EXCEPTION_IF_NULL(globals);
570 if (globals != GetGlobals().ptr()) {
571 py::str key_object(key);
572 PyObject *value = PyObject_GetItem(globals, key_object.ptr());
573 if (value != nullptr) {
574 py::object handle_value = py::reinterpret_steal<py::object>(value);
575 MapAdd(GetGlobals(), key, handle_value, &key);
576 } else {
577 // name error, global undefined
578 PyErr_Clear();
579 }
580 }
581 NewInstr(LOAD_GLOBAL);
582 code_.co_code.back()->set_name(key);
583 return;
584 }
585
586 py::object cnst = node->GetVobj()->GetPyObject();
587 if (opcode == LOAD_CONST) {
588 LoadConst(cnst);
589 return;
590 }
591 MS_LOG(INTERNAL_EXCEPTION) << "missing value, [" << node->ToString() << "]";
592 }
593
594 void CodeGenerator::LoadConst(const py::object &cnst) {
595 MS_EXCEPTION_IF_NULL(cnst.ptr());
596 if (CheckConstPyObject(cnst.ptr())) {
597 NewInstr(LOAD_CONST);
598 code_.co_code.back()->set_cnst(cnst);
599 return;
600 }
601 std::string key = GenerateObjectKey(cnst);
602 MapAdd(GetGlobals(), key, cnst);
603 NewInstr(LOAD_GLOBAL);
604 code_.co_code.back()->set_name(key);
605 }
606
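// Emits the operation of `node`: loads its inputs, executes the opcode (with a fixed oparg for
// container-update opcodes), then either pops the result or stores it into an allocated local,
// depending on whether the value is still alive.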
607 void CodeGenerator::BuildOper(ValueNode *node, int index) {
608 static const std::set<int> not_value_oper = {
609 STORE_DEREF, DELETE_DEREF, STORE_GLOBAL, DELETE_GLOBAL, STORE_ATTR, DELETE_ATTR,
610 STORE_SUBSCR, DELETE_SUBSCR, IMPORT_STAR, RAISE_VARARGS, RERAISE,
611 };
612 static const std::unordered_map<int, int> const_arg_oper = {
613 {LIST_APPEND, 1}, {LIST_EXTEND, 1}, {DICT_MERGE, 1}, {DICT_UPDATE, 1}, {SET_UPDATE, 1}, {SET_ADD, 1}, {MAP_ADD, 2},
614 };
615
616 if (IsNonLocalValue(node)) {
617 return;
618 }
619
620 for (auto param : node->getInputs()) {
621 LoadValue(param);
622 }
623 int op = node->GetOpcode();
624 int arg = pijit::Opcode(op).HasArg() ? node->GetOparg() : 0;
625 auto const_arg_oper_iter = const_arg_oper.find(op);
626 if (const_arg_oper_iter != const_arg_oper.end()) {
627 arg = const_arg_oper_iter->second;
628 }
629 NewInstr(op, arg, node->GetLineNo());
630 code_.co_code.back()->set_name(node->GetName());
631
632 if (not_value_oper.find(op) != not_value_oper.end()) {
633 return;
634 }
635 if (nodes_alive_[node] == 0) {
636 NewInstr(POP_TOP);
637 } else {
638 NewInstr(STORE_FAST, AllocLocal(node, index), node->GetLineNo());
639 }
640 }
641
642 void CodeGenerator::Init() {
643 const int size = SizeToInt(nodes_->inputs.size());
644 code_.co_nlocals = size;
645 for (int i = 0; i < size; ++i) {
646 ValueNode *param = nodes_->inputs[i];
647 locals_map_[param] = i;
648 MS_EXCEPTION_IF_CHECK_FAIL(!IsNonLocalValue(param), "got nonlocal parameter node: " + param->ToString());
649 }
650 }
651
652 void CodeGenerator::Build() {
653 // build operations
654 MarkAlive();
655 for (size_t index = 0; index < nodes_->operations.size(); ++index) {
656 BuildOper(nodes_->operations[index], index);
657 }
658 SetLocalsCount(locals_map_.size());
659 }
660
661 void CodeGenerator::GenReturn() {
662 for (const auto &i : nodes_->outputs) {
663 LoadValue(i);
664 }
665 if (nodes_->outputs.size() > 1) {
666 NewInstr(BUILD_TUPLE, nodes_->outputs.size());
667 }
668 if (nodes_->outputs.size() == 0) {
669 NewInstr(LOAD_CONST, 0);
670 code_.co_code.back()->set_cnst(py::none());
671 }
672 NewInstr(RETURN_VALUE);
673 SetLocalsCount(locals_map_.size());
674 }
675
676 static bool IsNotNeedTrack(const std::vector<std::unique_ptr<Instr>> &list, int start = -1) {
677 if (list.empty() || start == -1) {
678 return true;
679 }
680 auto iter = std::find_if(list.begin() + start, list.end(), [](const std::unique_ptr<Instr> &i) {
681 return Opcode(i->op()).IsCall() || Opcode(i->op()).IsBinaryMath();
682 });
683 return iter == list.end();
684 }
685
686 static std::vector<std::unique_ptr<Instr>> MakeFunc(const py::object &code, const std::string &name, int closures) {
687 std::vector<std::unique_ptr<Instr>> instrs;
688 for (int i = 0; i < closures; ++i) {
689 instrs.emplace_back(std::make_unique<Instr>(LOAD_CLOSURE, i));
690 }
691 unsigned make_oparg = 0;
692 if (closures != 0) {
693 make_oparg |= 0x08;
694 instrs.emplace_back(std::make_unique<Instr>(BUILD_TUPLE, closures));
695 }
696 instrs.emplace_back(std::make_unique<Instr>(LOAD_CONST, 0, code));
697 instrs.emplace_back(std::make_unique<Instr>(LOAD_CONST, 0, py::str(name)));
698 instrs.emplace_back(std::make_unique<Instr>(MAKE_FUNCTION, make_oparg));
699 return instrs;
700 }
701
702 std::vector<std::string> CodeBreakGenerator::GetClosureNames() const {
703 std::vector<std::string> names;
704 for (Py_ssize_t i = 0; i < PyTuple_GET_SIZE(co_->co_cellvars); ++i) {
705 names.push_back(PyUnicode_AsUTF8(PyTuple_GET_ITEM(co_->co_cellvars, i)));
706 }
707 for (Py_ssize_t i = 0; i < PyTuple_GET_SIZE(co_->co_freevars); ++i) {
708 names.push_back(PyUnicode_AsUTF8(PyTuple_GET_ITEM(co_->co_freevars, i)));
709 }
710 return names;
711 }
712
713 py::object CodeBreakGenerator::MakeCapturedCode(std::vector<std::unique_ptr<Instr>> &&load_oper,  // prepare parameters
714 int argc, unsigned code_flag) const {
715 CodeGenerator code_gen(&captured_);
716 code_gen.SetGlobals(GetGlobals());
717 code_gen.Init();
718 code_gen.AddInstrs(std::move(load_oper));
719 code_gen.Build();
720 code_gen.GenReturn();
721
722 unsigned flags = co_->co_flags & ~(CO_VARARGS | CO_VARKEYWORDS);
723 code_gen.SetArgsInfo(argc, 0);
724 code_gen.SetCodeFlags(flags | code_flag);
725 code_gen.SetFirstLineNumber(captured_.operations[0]->GetLineNo());
726 code_gen.SetFreeVariableNames(GetClosureNames());
727 code_gen.SetCodeName(MakeCompiledName(py::str(co_->co_name)));
728 code_gen.SetFileName(py::cast<py::object>(co_->co_filename));
729
730 code_gen.EraseUnusedInstr();
731 py::object code = CodeGenerator::Transform(code_gen.GetCode());
732 auto parent = getJitCompileResults(reinterpret_cast<PyObject *>(co_), false);
733 JitCompileResults *child = getJitCompileResults(code.ptr());
734 child->stat = CodeExtra::GRAPH_CAPTURED;
735 child->conf = parent->conf;
736 child->tbs = parent->tbs;
737 return code;
738 }
739
740 void CodeBreakGenerator::CallCapturedCode(CodeGenerator *code_gen) {
741 if (captured_.operations.empty()) {
742 return;
743 }
744 GraphParameterBuilder param_info;
745 BuildGraphParameters(code_gen->GetLocalsMap(), ¶m_info);
746 int flag = (param_info.vargs_ ? CO_VARARGS : 0) | (param_info.kwargs_ ? CO_VARKEYWORDS : 0);
747 py::object code = MakeCapturedCode(std::move(param_info.sort_), param_info.args_.size(), flag);
748
749 int closures = PyTuple_GET_SIZE(co_->co_cellvars) + PyTuple_GET_SIZE(co_->co_freevars);
750 code_gen->AddInstrs(MakeFunc(code, "<pijit.compile>", closures));
751 code_gen->AddInstrs(std::move(param_info.load_));
752 if (flag) {
753 code_gen->NewInstr(CALL_FUNCTION_EX, static_cast<bool>(flag & CO_VARKEYWORDS));
754 } else {
755 code_gen->NewInstr(CALL_FUNCTION, param_info.args_.size());
756 }
757 extra_local_ = code_gen->AllocLocal(nullptr);
758 code_gen->NewInstr(STORE_FAST, extra_local_);
759 code_gen->AddInstrs(std::move(param_info.dele_));
760 }
761
762 void CodeBreakGenerator::FixInterpretOuput(CodeGenerator *code_gen) {
763 if (captured_.outputs.empty()) {
764 return;
765 }
766 MS_EXCEPTION_IF_CHECK_FAIL(extra_local_ != -1, "can't find graph output");
767 code_gen->NewInstr(LOAD_FAST, extra_local_);
768 if (captured_.outputs.size() > 1) {
769 code_gen->NewInstr(UNPACK_SEQUENCE, captured_.outputs.size());
770 }
771 std::for_each(captured_.outputs.begin(), captured_.outputs.end(), [code_gen](ValueNode *i) {
772 // fill interpret local map
773 code_gen->NewInstr(STORE_FAST, code_gen->AllocLocal(i));
774 });
775 // reconstruct interpreted values if needed
776 }
777
778 void CodeBreakGenerator::RestoreStack(CodeGenerator *code_gen) const {
779 auto begin = interpret_.outputs.begin();
780 auto end = interpret_.outputs.end() - alive_locals_.size();
781 std::for_each(begin, end, [code_gen](ValueNode *i) { code_gen->LoadValue(i); });
782 }
783
784 void CodeBreakGenerator::RestoreLocals(CodeGenerator *code_gen, bool only_load) const {
785 auto begin = interpret_.outputs.end() - alive_locals_.size();
786 auto end = interpret_.outputs.end();
787 if (only_load) {
788 std::for_each(begin, end, [code_gen](ValueNode *i) { code_gen->LoadValue(i); });
789 return;
790 }
791 std::vector<std::unique_ptr<Instr>> st;
792 auto index_iter = alive_locals_.begin();
793 for (auto node_iter = begin; node_iter != end; ++node_iter, ++index_iter) {
794 auto target = code_gen->GetLocalsMap().find(*node_iter);
795 if (target != code_gen->GetLocalsMap().end() && target->second == *index_iter) {
796 continue;
797 }
798 MS_EXCEPTION_IF_CHECK_FAIL(index_iter != alive_locals_.end(), "error alive local");
799 code_gen->LoadValue(*node_iter);
800 st.push_back(std::make_unique<Instr>(STORE_FAST, *index_iter));
801 }
802 std::reverse(st.begin(), st.end());
803 code_gen->AddInstrs(std::move(st));
804 }
805
806 py::object CodeBreakGenerator::MakeUntrackedCode(int untracked_bci, int untracked_stack_effect) const {
807 const int argc = SizeToInt(interpret_.outputs.size()) + untracked_stack_effect;
808 int stack_count = argc - SizeToInt(alive_locals_.size());
809
810 std::vector<std::unique_ptr<Instr>> ld;
811 std::vector<std::unique_ptr<Instr>> st;
812 for (int i = 0; i < stack_count; ++i) {
813 ld.emplace_back(std::make_unique<Instr>(LOAD_FAST, i));
814 }
815 int index = stack_count;
816 for (auto iter = alive_locals_.begin(); iter != alive_locals_.end(); ++iter, ++index) {
817 if (*iter != index) {
818 ld.emplace_back(std::make_unique<Instr>(LOAD_FAST, index));
819 st.emplace_back(std::make_unique<Instr>(STORE_FAST, *iter));
820 }
821 }
822
823 std::vector<std::unique_ptr<Instr>> list = std::move(ld);
824 std::move(st.rbegin(), st.rend(), std::back_inserter(list));
825 std::vector<std::unique_ptr<Instr>> untracked = CodeGenerator::CopyInstr(GetCFG()->instr_pool(), untracked_bci);
826 int first_line = untracked[0]->bci();
827 std::move(untracked.begin(), untracked.end(), std::back_inserter(list));
828
829 int nlocals = GetCFG()->GetLocalCount();
830
831 CodeGenerator::Code ccode = {
832 argc,
833 0,
834 std::max(argc, nlocals),
835 (signed)co_->co_flags & ~(CO_VARARGS | CO_VARKEYWORDS),
836 first_line,
837 std::move(list),
838 py::cast<std::vector<std::string>>(co_->co_varnames),
839 std::vector<std::string>(),
840 GetClosureNames(),
841 MakeBrkName(PyUnicode_AsUTF8(co_->co_name), untracked_bci),
842 py::reinterpret_borrow<py::object>(co_->co_filename),
843 };
844 CodeGenerator::EraseUnusedInstr(&ccode.co_code);
845 py::object code = CodeGenerator::Transform(ccode);
846 auto parent = getJitCompileResults(reinterpret_cast<PyObject *>(co_), false);
847 CodeExtra *child = getJitCompileResults(code.ptr());
848 child->stat = CodeExtra::GRAPH_CANDIDATE;
849 child->conf = parent->conf;
850 child->tbs = parent->tbs;
851 return code;
852 }
853
854 void CodeBreakGenerator::ReconstructStack(CodeGenerator *code_gen, int untracked_bci,
855 int untracked_stack_effect) const {
856 const auto &instr = GetCFG()->instr_pool()[break_bci_];
857 if (break_bci_ == untracked_bci) {
858 RestoreStack(code_gen);
859 return;
860 }
861 if (instr->op() != CALL_FUNCTION && instr->op() != CALL_FUNCTION_KW) {
862 RestoreStack(code_gen);
863 code_gen->AddInstrs(CodeGenerator::CopyInstr(cfg_->instr_pool(), break_bci_, untracked_bci));
864 return;
865 }
866
867 // (chaiyouheng): replace function call, mark function to compile ...
868 RestoreStack(code_gen);
869 code_gen->NewInstr(instr->op(), instr->arg(), instr->line());
870 }
871
872 void CodeBreakGenerator::BreakAtIf(CodeGenerator *code_gen) const {
873 const auto &list = GetCFG()->instr_pool();
874 int op = list[break_bci_]->op();
875 int stack_effect = -1;
876 int stack_count = SizeToInt(interpret_.outputs.size() - alive_locals_.size());
877 int closures = PyTuple_GET_SIZE(co_->co_cellvars) + PyTuple_GET_SIZE(co_->co_freevars);
878 py::object code;
879
880 MS_EXCEPTION_IF_CHECK_FAIL(stack_count >= 1, "error stack");
881
882 RestoreStack(code_gen);
883 code_gen->NewInstr(op);
884 Instr *if_instr = code_gen->GetCode().co_code.back().get();
885
886 // fall-branch
887 code = MakeUntrackedCode(break_bci_ + 1, stack_effect);
888 code_gen->AddInstrs(MakeFunc(code, "<pijit.resume>", closures));
889 code_gen->AddInstrs(CodeGenerator::RotStack(stack_count + stack_effect));
890 RestoreLocals(code_gen, true);
891 code_gen->NewInstr(CALL_FUNCTION, interpret_.outputs.size() + stack_effect);
892 code_gen->NewInstr(RETURN_VALUE);
893
894 // jump-branch
895 stack_effect = (op == JUMP_IF_TRUE_OR_POP || op == JUMP_IF_FALSE_OR_POP) ? 0 : -1;
896 code = MakeUntrackedCode(list[break_bci_]->extra_jump()->bci(), stack_effect);
897 auto jump_branch = MakeFunc(code, "<pijit.resume>", closures);
898 if_instr->set_extra_jump(jump_branch.begin()->get());
899 code_gen->AddInstrs(std::move(jump_branch));
900 code_gen->AddInstrs(CodeGenerator::RotStack(stack_count + stack_effect));
901 RestoreLocals(code_gen, true);
902 code_gen->NewInstr(CALL_FUNCTION, interpret_.outputs.size() + stack_effect);
903 code_gen->NewInstr(RETURN_VALUE);
904 }
905
906 void CodeBreakGenerator::BreakAtBlock(CodeGenerator *code_gen, int untracked_bci, int untracked_stack_effect) {
907 RestoreStack(code_gen);
908 RestoreLocals(code_gen, false);
909 const auto &instr_list = GetCFG()->instr_pool();
910 code_gen->AddInstrs(CodeGenerator::CopyInstr(instr_list, break_bci_, untracked_bci));
911
912 BitMap alive = GetCFG()->liveness()->CollectAlive(untracked_bci);
913 BitMap defined(alive.size());
914 for (int i = break_bci_; i < untracked_bci; ++i) {
915 if (instr_list[i]->op() == STORE_FAST) {
916 defined.Set(instr_list[i]->arg());
917 }
918 }
919 std::for_each(alive_locals_.begin(), alive_locals_.end(), [&defined](int i) { defined.Set(i); });
920 alive.And(defined);
921
922 alive_locals_.clear();
923 for (BitMap::Iter iter(&alive, true), end(&alive, false); iter != end; ++iter) {
924 alive_locals_.push_back(*iter);
925 }
926
927 interpret_.outputs.resize(alive_locals_.size(), &ValueNode::kUnboundLocal);
928 untracked_stack_effect = 0;
929
930 py::object code = MakeUntrackedCode(untracked_bci, untracked_stack_effect);
931 int closures = PyTuple_GET_SIZE(co_->co_cellvars) + PyTuple_GET_SIZE(co_->co_freevars);
932 code_gen->AddInstrs(MakeFunc(code, "<pijit.resume>", closures));
933
934 for (auto i : alive_locals_) {
935 code_gen->NewInstr(LOAD_FAST, i);
936 }
937 code_gen->NewInstr(CALL_FUNCTION, interpret_.outputs.size() + untracked_stack_effect);
938 code_gen->NewInstr(RETURN_VALUE);
939 }
940
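// Generates the call to the resume function that executes the bytecode after the break point,
// choosing a strategy based on the instruction that stopped the trace: plain fall-through,
// block head (try/with/loop), conditional jump, or an unsupported opcode.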
941 void CodeBreakGenerator::CallUntrackedCode(CodeGenerator *code_gen) {
942 if (break_bci_ == -1) {
943 return;
944 }
945 const auto &list = GetCFG()->instr_pool();
946 int start_bci = break_bci_;
947 int start_op = list[start_bci]->op();
948
949 int untracked_bci;
950 int untracked_stack_effect;
951 bool find_block = FindBlock(start_bci, GetCFG(), &untracked_bci, &untracked_stack_effect);
952 untracked_bci++;
953 if (IsNotNeedTrack(GetCFG()->instr_pool(), std::min(untracked_bci + 1, SizeToInt(list.size())))) {
954 RestoreStack(code_gen);
955 RestoreLocals(code_gen, false);
956 code_gen->AddInstrs(CodeGenerator::CopyInstr(GetCFG()->instr_pool(), break_bci_));
957 return;
958 }
959 if (find_block) {
960 BreakAtBlock(code_gen, untracked_bci, untracked_stack_effect);
961 return;
962 }
963 if (start_op == JUMP_IF_FALSE_OR_POP || start_op == JUMP_IF_TRUE_OR_POP || start_op == POP_JUMP_IF_FALSE ||
964 start_op == POP_JUMP_IF_TRUE) {
965 BreakAtIf(code_gen);
966 return;
967 }
968 if (start_op != JUMP_ABSOLUTE && start_op != JUMP_FORWARD) {
969 MS_EXCEPTION_IF_CHECK_FAIL(list[start_bci]->extra_jump() == nullptr, "unexpected jump instruction");
970 // break at unsupported bytecode
971 untracked_stack_effect = PyCompile_OpcodeStackEffect(start_op, list[start_bci]->arg());
972 untracked_bci++;
973 }
974
975 py::object code = MakeUntrackedCode(untracked_bci, untracked_stack_effect);
976 int closures = PyTuple_GET_SIZE(co_->co_cellvars) + PyTuple_GET_SIZE(co_->co_freevars);
977 code_gen->AddInstrs(MakeFunc(code, "<pijit.resume>", closures));
978
979 ReconstructStack(code_gen, untracked_bci, untracked_stack_effect);
980 RestoreLocals(code_gen, true);
981
982 code_gen->NewInstr(CALL_FUNCTION, interpret_.outputs.size() + untracked_stack_effect);
983 code_gen->NewInstr(RETURN_VALUE);
984 }
985
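// Builds the dispatch code object: the interpreted prologue, the call into the captured graph
// code, restoration of graph outputs and side effects, and the call into the untracked tail
// (or the final return).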
986 py::object CodeBreakGenerator::MakeDispatchCode() {
987 auto jcr = getJitCompileResults(reinterpret_cast<PyObject *>(co_), false);
988
989 CodeGenerator code_gen(&interpret_);
990 code_gen.SetGlobals(GetGlobals());
991 code_gen.Init();
992 for (auto i : captured_.inputs) {
993 code_gen.MarkAlive(i);
994 }
995 code_gen.Build();
996
997 CallCapturedCode(&code_gen);
998 FixInterpretOuput(&code_gen);
999
1000 side_effect_handler_->Restore(&code_gen);
1001 interpret_.outputs.resize(interpret_.outputs.size() - side_effect_handler_->GetRequiredNodes().size());
1002
1003 CallUntrackedCode(&code_gen);
1004 MakeReturn(&code_gen);
1005
1006 std::string co_name = PyUnicode_AsUTF8(co_->co_name);
1007 co_name = std::to_string(jcr->IncCodeCount()) + "R." + co_name;
1008
1009 int nlocals = SizeToInt(code_gen.GetLocalsMap().size());
1010 nlocals = std::max(nlocals, co_->co_nlocals);
1011 nlocals = std::max(nlocals, cfg_->GetLocalCount());
1012
1013 ExtendCodeInfo(&code_gen, false);
1014 code_gen.SetLocalsCount(nlocals);
1015 code_gen.SetCodeName(co_name);
1016
1017 code_gen.EraseUnusedInstr();
1018 py::object result = CodeGenerator::Transform(code_gen.GetCode());
1019 return result;
1020 }
1021
1022 void CodeBreakGenerator::MakeReturn(CodeGenerator *code_gen) const {
1023 if (break_bci_ != -1) {
1024 // call untracked nodes
1025 return;
1026 }
1027 if (captured_.operations.empty()) {
1028 // all values are produced by the interpreter
1029 code_gen->GenReturn();
1030 return;
1031 }
1032 // graph is not broken; mix interpreted execution and graph execution
1033 ValueNode *rv = interpret_.outputs[0];
1034 auto iter = code_gen->GetLocalsMap().find(rv);
1035 if (iter != code_gen->GetLocalsMap().end() || IsNonLocalValue(rv)) {
1036 code_gen->LoadValue(rv);
1037 code_gen->NewInstr(RETURN_VALUE);
1038 return;
1039 }
1040 MS_EXCEPTION_IF_CHECK_FAIL(captured_.outputs.size() == 1 && extra_local_ != -1,
1041 "can't find return value from interpret locals and graph locals");
1042 code_gen->NewInstr(LOAD_FAST, extra_local_);
1043 code_gen->NewInstr(RETURN_VALUE);
1044 }
1045
1046 py::object CodeBreakGenerator::MakeCapturedCode() const {
1047 auto jcr = getJitCompileResults(reinterpret_cast<PyObject *>(co_), false);
1048
1049 CodeGenerator code_gen(&interpret_);
1050 code_gen.SetGlobals(GetGlobals());
1051 code_gen.Init();
1052 code_gen.Build();
1053 code_gen.GenReturn();
1054
1055 std::string co_name = MakeCompiledName(PyUnicode_AsUTF8(co_->co_name));
1056 co_name = std::to_string(jcr->IncCodeCount()) + "R." + co_name;
1057
1058 int nlocals = SizeToInt(code_gen.GetLocalsMap().size());
1059 nlocals = std::max(nlocals, co_->co_nlocals);
1060 nlocals = std::max(nlocals, cfg_->GetLocalCount());
1061
1062 ExtendCodeInfo(&code_gen, true);
1063 code_gen.SetLocalsCount(nlocals);
1064 code_gen.SetCodeName(co_name);
1065
1066 code_gen.EraseUnusedInstr();
1067 py::object result = CodeGenerator::Transform(code_gen.GetCode());
1068
1069 JitCompileResults *child = getJitCompileResults(result.ptr());
1070 child->stat = CodeExtra::GRAPH_CAPTURED;
1071 child->conf = jcr->conf;
1072 child->tbs = jcr->tbs;
1073 return result;
1074 }
1075
1076 void CodeBreakGenerator::ExtendCodeInfo(CodeGenerator *cg, bool merge_kw_only) const {
1077 int argc = merge_kw_only ? (co_->co_argcount) + co_->co_kwonlyargcount : co_->co_argcount;
1078 int kw_only = merge_kw_only ? 0 : co_->co_kwonlyargcount;
1079
1080 cg->SetArgsInfo(argc, kw_only);
1081 cg->SetLocalsCount(co_->co_nlocals);
1082 cg->SetCodeFlags(co_->co_flags);
1083 cg->SetFirstLineNumber(co_->co_firstlineno);
1084 cg->SetVariableNames(py::cast<std::vector<std::string>>(co_->co_varnames));
1085 cg->SetCellVariableNames(py::cast<std::vector<std::string>>(co_->co_cellvars));
1086 cg->SetFreeVariableNames(py::cast<std::vector<std::string>>(co_->co_freevars));
1087 cg->SetFileName(py::reinterpret_borrow<py::object>(co_->co_filename));
1088 }
1089
1090 void CodeBreakGenerator::Init(const Graph *graph, const GraphAnalyzer &analyzer) {
1091 alive_locals_ = analyzer.alive_locals();
1092 break_bci_ = graph->GetStopTraceBci();
1093 cfg_ = graph->GetCFG().get();
1094 const GraphAnalyzer::CapturedInfo &info = analyzer.GetCaptureInfo();
1095 interpret_.inputs = info.interpret_.inputs;
1096 interpret_.outputs = info.interpret_.outputs;
1097 interpret_.operations = info.interpret_.operations;
1098 captured_.inputs = info.captured_.inputs;
1099 captured_.outputs = info.captured_.outputs;
1100 captured_.operations = info.captured_.operations;
1101 graph_inputs_info_.args = info.graph_inputs_.args;
1102 graph_inputs_info_.vargs = info.graph_inputs_.vargs;
1103 graph_inputs_info_.kwargs = info.graph_inputs_.kwargs;
1104 graph_inputs_info_.globals = info.graph_inputs_.globals;
1105 side_effect_handler_ = graph->GetSideEffect();
1106
1107 size_t alive_count;
1108 if (break_bci_ != -1) {
1109 size_t stack_count = graph->GetFrame(break_bci_).GetStacks().size();
1110 size_t alive_locals_count = alive_locals_.size();
1111 size_t side_effect_required = side_effect_handler_->GetRequiredNodes().size();
1112 alive_count = stack_count + alive_locals_count + side_effect_required;
1113 } else {
1114 alive_count = 1 + side_effect_handler_->GetRequiredNodes().size();
1115 }
1116 MS_EXCEPTION_IF_CHECK_FAIL(alive_count == interpret_.outputs.size(), "error alive count");
1117
1118 if (analyzer.NeedInterpret()) {
1119 return;
1120 }
1121 // all parameters are supported by the graph
1122 captured_.inputs.clear();
1123 captured_.outputs.clear();
1124 interpret_.operations = std::move(captured_.operations);
1125 }
1126
1127 const CFG *CodeBreakGenerator::GetCFG() const { return cfg_; }
1128
1129 void CodeBreakGenerator::BuildGraphParameters(const std::unordered_map<ValueNode *, int> &locals,
1130 GraphParameterBuilder *builder) {
1131 // NOTE: if *vargs is a cell variable, it is not a parameter node
1132 MS_EXCEPTION_IF_CHECK_FAIL(co_->co_nlocals == SizeToInt(interpret_.inputs.size()),
1133 "interpret inputs must be same as locals");
1134
1135 builder->Init(graph_inputs_info_.args, graph_inputs_info_.globals, graph_inputs_info_.vargs,
1136 graph_inputs_info_.kwargs);
1137 builder->Build(locals);
1138
1139 size_t inputs_count = captured_.inputs.size();
1140 captured_.inputs = builder->args_;
1141 if (builder->vargs_ != nullptr) {
1142 captured_.inputs.push_back(builder->vargs_);
1143 }
1144 if (builder->kwargs_ != nullptr) {
1145 captured_.inputs.push_back(builder->kwargs_);
1146 }
1147 captured_.inputs.insert(captured_.inputs.end(), builder->globals_.begin(), builder->globals_.end());
1148 MS_EXCEPTION_IF_CHECK_FAIL(inputs_count == captured_.inputs.size(), "error parameters");
1149 }
1150
1151 std::string GraphParameterBuilder::Key(int index, ValueNode *n) {
1152 static uint64_t kId = 0;
1153 PyTypeObject *tp = n->GetVobj() ? n->GetVobj()->GetTypeObject() : nullptr;
1154 std::string descr = AObject::GetTypeDesc(n->GetVobj() ? n->GetVobj()->GetType() : AObject::kTypeAnyValue);
1155 std::stringstream s;
1156 s << "<" << index << ">" << (tp ? (tp->tp_name ? tp->tp_name : "<unnamed>") : descr) << "<" << (kId++) << ">";
1157 return s.str();
1158 }
1159
1160 void GraphParameterBuilder::Init(const std::vector<ValueNode *> &args, const std::vector<ValueNode *> &globals,
1161 ValueNode *vargs, ValueNode *kwargs) {
1162 args_ = args;
1163 globals_ = globals;
1164 vargs_ = vargs;
1165 kwargs_ = kwargs;
1166 }
1167
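// Emits three instruction sequences: `load_` (caller pushes the arguments and stores extra
// values into temporary globals), `dele_` (caller deletes those globals afterwards), and
// `sort_` (callee reloads the globals into its locals). *vargs and **kwargs are packed by
// BuildVargs/BuildKwVargs.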
1168 void GraphParameterBuilder::Build(const std::unordered_map<ValueNode *, int> &locals) {
1169 auto Load = [&locals](ValueNode *param) {
1170 auto iter = locals.find(param);
1171 MS_EXCEPTION_IF_CHECK_FAIL(iter != locals.end(), "can't find graph parameters from interpret locals");
1172 return std::make_unique<Instr>(LOAD_FAST, iter->second);
1173 };
1174
1175 /**
1176 * graph parameters treat tuple, list, and dict as constants,
1177 * so these parameters must be unpacked here and repacked by the graph:
1178 * if param is tuple or param is list:
1179 * TupleRebuild(param, &load_, &sort_, &args_)
1180 * if param is dict:
1181 * DictRebuild(param, &load_, &sort_, &args_)
1182 **/
1183 std::transform(args_.begin(), args_.end(), std::back_inserter(load_), Load);
1184
1185 const int argc = SizeToInt(args_.size()) + (vargs_ != nullptr) + (kwargs_ != nullptr);
1186 for (size_t i = 0; i < globals_.size(); ++i) {
1187 std::string name = GraphParameterBuilder::Key(i, globals_[i]);
1188 load_.emplace_back(Load(globals_[i]));
1189 load_.emplace_back(std::make_unique<Instr>(STORE_GLOBAL, 0, name));
1190 dele_.emplace_back(std::make_unique<Instr>(DELETE_GLOBAL, 0, name));
1191 sort_.emplace_back(std::make_unique<Instr>(LOAD_GLOBAL, 0, name));
1192 sort_.emplace_back(std::make_unique<Instr>(STORE_FAST, argc + i));
1193 }
1194 if (vargs_) {
1195 BuildVargs(locals);
1196 }
1197 if (kwargs_) {
1198 BuildKwVargs(locals);
1199 }
1200 }
1201
1202 void GraphParameterBuilder::BuildVargs(const std::unordered_map<ValueNode *, int> &locals) {
1203 auto iter = locals.find(vargs_);
1204 MS_EXCEPTION_IF_CHECK_FAIL(iter != locals.end(), "can't find graph parameters from interpret locals");
1205 if (args_.size() == 0) {
1206 load_.push_back(std::make_unique<Instr>(LOAD_FAST, iter->second));
1207 return;
1208 }
1209
1210 load_.push_back(std::make_unique<Instr>(BUILD_LIST, args_.size()));
1211 load_.push_back(std::make_unique<Instr>(LOAD_FAST, iter->second));
1212 #if PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION < 9
1213 const int tuple_unpack_arg = 2;
1214 load_.push_back(std::make_unique<Instr>(BUILD_TUPLE_UNPACK, tuple_unpack_arg));
1215 #else
1216 load_.push_back(std::make_unique<Instr>(LIST_EXTEND, 1));
1217 load_.push_back(std::make_unique<Instr>(LIST_TO_TUPLE, 0));
1218 #endif
1219 }
1220
1221 void GraphParameterBuilder::BuildKwVargs(const std::unordered_map<ValueNode *, int> &locals) {
1222 auto iter = locals.find(kwargs_);
1223 MS_EXCEPTION_IF_CHECK_FAIL(iter != locals.end(), "can't find graph parameters from interpret locals");
1224
1225 if (vargs_ == nullptr) {
1226 // only kwargs
1227 load_.push_back(std::make_unique<Instr>(BUILD_TUPLE, args_.size()));
1228 }
1229 load_.push_back(std::make_unique<Instr>(LOAD_FAST, iter->second));
1230 }
1231
1232 // e.g. while..., for..., while...else..., for...else...,
1233 static int FindLoopEnd(int start, const CFG *cfg) {
1234 Block *loop_begin = cfg->GetBlockByBci(start);
1235 if (!loop_begin->is_loop_head()) {
1236 return start - 1;
1237 }
1238
1239 const auto &instrs = cfg->instr_pool();
1240 int loop_exit = loop_begin->begin_ci();
1241 int target = loop_begin->GetJumpBB() ? loop_begin->GetJumpBB()->begin_ci() : loop_exit;
1242 // find loop last exit
1243 for (; loop_exit != target; ++loop_exit) {
1244 Instr *jump = instrs[loop_exit]->extra_jump();
1245 if (jump == nullptr) {
1246 continue;
1247 }
1248 if (target < jump->bci()) {
1249 // if jump forward out of loop branch target, reset target
1250 target = jump->bci();
1251 }
1252 }
1253 // find last backward edge, get next instruction
1254 int result = 0;
1255 for (auto i : loop_begin->pred_bbs()) {
1256 result = std::max(result, i->end_ci());
1257 }
1258 return std::max(result, target) - 1;
1259 }
1260
1261 #if (PY_MAJOR_VERSION == 3) && (PY_MINOR_VERSION < 9)
1262
1263 static size_t FindTryBlockEnd(int start_bci, const CFG *cfg) {
1264 const auto &list = cfg->instr_pool();
1265 size_t block_end = list[start_bci]->extra_jump()->bci();
1266 for (; block_end < list.size() && list[block_end]->op() != END_FINALLY; ++block_end) {
1267 }
1268 if (list[block_end - 1]->extra_jump()) {
1269 size_t jump = list[block_end - 1]->extra_jump()->bci();
1270 block_end = std::max(block_end, jump);
1271 }
1272 return block_end;
1273 }
1274
1275 static bool FindBlock(int start_bci, const CFG *cfg, int *end_bci, int *stack_effect) {
1276 const auto &list = cfg->instr_pool();
1277 size_t block_end = 0;
1278 *stack_effect = 0;
1279 int opcode = list[start_bci]->op();
1280 if (opcode == Opcode::k_ILLEGAL_OPCODE) {
1281 MS_LOG(INTERNAL_EXCEPTION) << "shouldn't reach here";
1282
1283 #if (PY_MAJOR_VERSION == 3) && (PY_MINOR_VERSION == 7)
1284 } else if (opcode == SETUP_EXCEPT) {
1285 block_end = FindTryBlockEnd(start_bci, cfg);
1286 } else if (opcode == SETUP_LOOP) {
1287 block_end = list[start_bci]->extra_jump()->bci() - 1;
1288 } else if (opcode == FOR_ITER) {
1289 block_end = FindLoopEnd(start_bci, cfg);
1290 *stack_effect = -1;
1291 #endif
1292 #if (PY_MAJOR_VERSION == 3) && (PY_MINOR_VERSION == 8)
1293 } else if (opcode == BEGIN_FINALLY || opcode == CALL_FINALLY) {
1294 MS_EXCEPTION_IF_CHECK_FAIL(false, "shouldn't reach here, must be break at SETUP_FINALLY");
1295 } else if (opcode == FOR_ITER) {
1296 block_end = FindLoopEnd(start_bci, cfg);
1297 *stack_effect = -1;
1298 #endif
1299 } else if (opcode == SETUP_WITH || opcode == SETUP_FINALLY) {
1300 if (opcode == SETUP_WITH) {
1301 *stack_effect = -1;
1302 }
1303 block_end = FindTryBlockEnd(start_bci, cfg);
1304 } else {
1305 block_end = FindLoopEnd(start_bci, cfg);
1306 }
1307 if (list[start_bci]->op() == FOR_ITER && SizeToInt(block_end) == start_bci - 1) {
1308 // break at FOR_ITER and it is not a loop
1309 block_end = list[start_bci]->extra_jump()->bci() - 1;
1310 }
1311 *end_bci = block_end;
1312 return SizeToInt(block_end) != start_bci - 1;
1313 }
1314
1315 #else
1316
1317 static int FindWithBlockEnd(int start_bci, const CFG *cfg) {
1318 const auto &list = cfg->instr_pool();
1319 size_t tar = (size_t)list[start_bci]->extra_jump()->bci();
1320 bool validate = tar + 1 < list.size() && list[tar]->op() == WITH_EXCEPT_START && list[tar + 1]->extra_jump();
1321 MS_EXCEPTION_IF_CHECK_FAIL(validate, "can't find with block");
1322 return list[tar - 1]->extra_jump() ? list[tar - 1]->extra_jump()->bci() - 1 : list.back()->bci();
1323 }
1324
1325 // the finally block has two copies in the bytecode; only tested for Python 3.9
1326 static int FindFinallyBlockEnd(int raise_block, int normal_block, const CFG *cfg) {
1327 const auto &list = cfg->instr_pool();
1328 MS_EXCEPTION_IF_CHECK_FAIL(normal_block < SizeToInt(list.size()) && list[normal_block]->op() == POP_BLOCK,
1329 "can't find finally block");
1330 auto i = normal_block + 1;
1331 auto j = raise_block;
1332 for (; list[i]->op() == list[j]->op(); ++i, ++j) {
1333 }
1334 // only python3.9, list[i]->op() == JUMP_FORWARD && list[j]->op() == RERAISE;
1335 return j;
1336 }
1337
1338 static int FindTryBlockEnd(int start, const CFG *cfg) {
1339 const auto &list = cfg->instr_pool();
1340 Instr *tar = list[start]->extra_jump();
1341 MS_EXCEPTION_IF_NULL(tar);
1342
1343 size_t res = (size_t)tar->bci();
1344 if (tar->op() == DUP_TOP) {
1345 // try block without finally
1346 MS_EXCEPTION_IF_CHECK_FAIL(res + 2 < list.size(), "can't find try block");
1347 while (res < list.size() && list[res]->op() != RERAISE) {
1348 res = list[res + 2]->extra_jump()->bci();
1349 }
1350 if (list[res - 1]->op() == JUMP_FORWARD) {
1351 res = list[res - 1]->extra_jump()->bci();
1352 }
1353 return res;
1354 }
1355 // the finally block has two copies in the bytecode: the first is the normal path and ends with JUMP_FORWARD, the second ends with RERAISE
1356 int reraise_finally_block_start = tar->bci();
1357 if (start + 1 < SizeToInt(list.size()) && list[start + 1]->op() != SETUP_FINALLY) {
1358 // Handle the try-without-exception case.
1359 res = start;
1360 } else {
1361 MS_EXCEPTION_IF_CHECK_FAIL(start + 1 < SizeToInt(list.size()) && list[start + 1]->op() == SETUP_FINALLY,
1362 "can't find finally block");
1363 res = IntToSize(list[start + 1]->extra_jump()->bci());
1364 while (res < list.size() && list[res]->op() != RERAISE) {
1365 res = list[res + 2]->extra_jump()->bci();
1366 }
1367 }
1368 /*
1369 In the current situation:
1370 try:
1371 ...
1372 else:
1373 ...
1374 finally:
1375 ...
1376 this code has an else block, so we should find the bytecode 'POP_BLOCK'
1377 */
1378 while (res < list.size() && list[res]->op() != POP_BLOCK) {
1379 res++;
1380 }
1381 return FindFinallyBlockEnd(reraise_finally_block_start, res, cfg);
1382 }
1383
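// Locates the end of the block that starts at start_bci (try via SETUP_FINALLY, with via
// SETUP_WITH, or a loop) and its stack effect; returns false when start_bci does not start a
// block.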
1384 static bool FindBlock(int start_bci, const CFG *cfg, int *end_bci, int *stack_effect) {
1385 const std::vector<std::unique_ptr<Instr>> &list = cfg->instr_pool();
1386 *stack_effect = 0;
1387 int opcode = list[start_bci]->op();
1388 if (opcode == SETUP_FINALLY) {
1389 *end_bci = FindTryBlockEnd(start_bci, cfg);
1390 return true;
1391 } else if (opcode == SETUP_WITH) {
1392 *end_bci = FindWithBlockEnd(start_bci, cfg);
1393 *stack_effect = -1;
1394 return true;
1395 } else if (opcode == FOR_ITER) {
1396 *stack_effect = -1;
1397 }
1398 *end_bci = FindLoopEnd(start_bci, cfg);
1399 if (list[start_bci]->op() == FOR_ITER && *end_bci == start_bci - 1) {
1400 // break at FOR_ITER and it is not a loop
1401 *end_bci = list[start_bci]->extra_jump()->bci() - 1;
1402 }
1403 return *end_bci != start_bci - 1;
1404 }
1405 #endif
1406
1407 py::object MakeCodeFromCodeGen(const GraphBuilderPtr &builder, const GraphAnalyzerPtr &analyzer, PyObject *globals) {
1408 TimeRecorder time_recorder(__FUNCTION__, kPIJitConfigDefault.GetBoolConfig(GraphJitConfig::kLogPerf));
1409
1410 auto graph = builder->GetGraph();
1411 auto cg = CodeBreakGenerator::Creator(builder, graph->GetCodeObj());
1412 cg->Init(graph, *analyzer);
1413 cg->SetGlobals(py::cast<py::dict>(globals));
1414 py::object code = analyzer->NeedInterpret() ? cg->MakeDispatchCode() : cg->MakeCapturedCode();
1415 return code;
1416 }
1417
1418 std::string PrintInstr(const std::vector<std::unique_ptr<Instr>> &list) {
1419 std::stringstream s;
1420 for (const auto &i : list) {
1421 s << i->ToString() << "\n";
1422 }
1423 return s.str();
1424 }
1425
1426 std::string PrintNodeSet(const NodeSet &nodes) {
1427 std::stringstream s;
1428 s << "inputs: \n";
1429 for (auto i : nodes.inputs) {
1430 s << i->ToString() << "\n";
1431 }
1432 s << "outputs: \n";
1433 for (auto i : nodes.outputs) {
1434 s << i->ToString() << "\n";
1435 }
1436 s << "operations: \n";
1437 for (auto i : nodes.operations) {
1438 s << i->ToString() << "\n";
1439 }
1440 return s.str();
1441 }
1442
1443 py::object MindCodeBreakGenerator::MakeCapturedCode(std::vector<std::unique_ptr<Instr>> &&load, int argc,
1444 unsigned code_flag) const {
1445 // a stub to call graph
1446 py::object res = this->CodeBreakGenerator::MakeCapturedCode(std::move(load), argc, code_flag);
1447 auto jcr = getJitCompileResults(reinterpret_cast<PyObject *>(co_));
1448 if (jcr->conf->GetBoolConfig(GraphJitConfig::kInterpretCapturedCode)) {
1449 return res;
1450 }
1451 auto name = PyUnicode_AsUTF8(reinterpret_cast<PyCodeObject *>(res.ptr())->co_name);
1452 Compile(name, argc, 0, code_flag, res);
1453 return res;
1454 }
1455
1456 py::object MindCodeBreakGenerator::MakeCapturedCode() const {
1457 auto jcr = getJitCompileResults(reinterpret_cast<PyObject *>(co_));
1458 if (jcr->conf->GetBoolConfig(GraphJitConfig::kInterpretCapturedCode)) {
1459 return this->CodeBreakGenerator::MakeCapturedCode();
1460 }
1461 auto name = std::to_string(jcr->IncCodeCount()) + "R." + MakeCompiledName(PyUnicode_AsUTF8(co_->co_name));
1462 Compile(name, co_->co_argcount, co_->co_kwonlyargcount, co_->co_flags, py::object());
1463 return py::object();
1464 }
1465
1466 void MindCodeBreakGenerator::Compile(const std::string &co_name, int co_argcount, int co_kwonlyargcount, int co_flags,
1467 const py::object &stub) const {
1468 TimeRecorder compile_time("MindCodeCompile", kPIJitConfigDefault.GetBoolConfig(GraphJitConfig::kLogGuardPerf));
1469
1470 // Compile graph.
1471 auto b = std::dynamic_pointer_cast<MindGraphBuilder>(builder_);
1472 MS_EXCEPTION_IF_NULL(b);
1473 FGBuilder()->ClearNodeAbstract();
1474 FGBuilder()->SetGraphName(co_name);
1475 auto func_graph = FGBuilder()->graph();
1476 if (func_graph == nullptr) {
1477 MS_LOG(EXCEPTION) << "Get function graph from function graph builder failed.";
1478 }
1479 std::string phase =
1480 py::cast<std::string>(co_->co_filename) + "_" + std::to_string(co_->co_firstlineno) + "_" + co_name;
1481 const auto ¶meters = func_graph->parameters();
1482 py::tuple args(parameters.size() - func_graph->fv_param_count());
1483 for (size_t i = 0; i < parameters.size(); ++i) {
1484 auto para = parameters[i]->cast<ParameterPtr>();
1485 MS_EXCEPTION_IF_NULL(para);
1486 if (para->has_default()) {
1487 continue;
1488 }
1489 phase += "_" + para->abstract()->ToString();
1490 args[i] = *(para->user_data<py::object>("pi_jit_py_obj"));
1491 }
1492 phase += ".pi_jit";
1493 MindCompiler::CompileInfo compile_info{co_name, co_argcount, co_kwonlyargcount, co_flags};
1494 CallableGraph callable = mindspore::pijit::MindCompiler::Compile(func_graph, args, py::dict(), phase, compile_info);
1495 // Set NativeFunc.
1496 auto parent = getJitCompileResults(reinterpret_cast<PyObject *>(co_), false);
1497 if (stub.ptr() == nullptr) {
1498 parent->code->SetNativeFunc(phase, callable, nullptr);
1499 parent->stat = CodeExtra::GRAPH_CALLABLE;
1500 } else {
1501 JitCompileResults *child = getJitCompileResults(stub.ptr());
1502 MS_EXCEPTION_IF_CHECK_FAIL(child->code == nullptr, "must be a new stub code");
1503 child->code = child->codehub->AddOptTarget(OptOption::CreateOptionByPoint(child));
1504 child->code->SetNativeFunc(phase, callable, nullptr);
1505 child->stat = CodeExtra::GRAPH_CALLABLE;
1506 child->conf = parent->conf;
1507 child->tbs = parent->tbs;
1508 }
1509 }
1510
1511 } // namespace pijit
1512 } // namespace mindspore
1513