/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef COMPILER_OPTIMIZER_IR_IR_CONSTRUCTOR_H
#define COMPILER_OPTIMIZER_IR_IR_CONSTRUCTOR_H

#include <memory>
#include "graph.h"
#include "graph_checker.h"
#include "mark_word.h"
#include "optimizer/ir_builder/inst_builder.h"

namespace panda::compiler {
/**
 * This class aims to simplify IR construction.
 *
 * Example:
 * auto g = CreateEmptyGraph();
 * GRAPH(g) {
 *     BASIC_BLOCK(0, 1, 2) {
 *         INST(0, Opcode::IntConstant).Constant(12);
 *         INST(1, Opcode::IntConstant).Constant(12);
 *         INST(2, Opcode::Add).Inputs(0, 1);
 *         INST(6, Opcode::Compare).Inputs(2).CC(ConditionCode::CC_AE);
 *         INST(7, Opcode::If).Inputs(6);
 *     }
 *     BASIC_BLOCK(1, 2) {
 *         INST(3, Opcode::Not).Inputs(0);
 *     }
 *     BASIC_BLOCK(2, -1) {
 *         INST(4, Opcode::Phi).Inputs(2, 3);
 *         INST(5, Opcode::Not).Inputs(4);
 *     }
 * }
 * g->Dump(cerr);
 *
 * The GRAPH(g) macro initializes the Builder object with the graph 'g'. The Builder works with this graph only
 * inside the braced scope that follows.
 * BASIC_BLOCK creates a new basic block and adds it to the current graph. All code inside the following scope
 * works with this basic block.
 *   The first argument is the ID of the basic block; it must be unique within the graph.
 *   All remaining arguments are IDs of successor blocks. The value '-1' means that there is no successor; a block
 *   without successors is considered an end block.
 *   The block with ID '0' is considered the start block.
 * INST creates a new instruction and appends it to the current basic block.
 *   The first parameter is the ID of the instruction; it must be unique within the current graph.
 *   The second parameter is the opcode.
 *   Dataflow can be constructed via the 'Inputs' method, which takes the IDs of the input instructions as
 *   parameters.
 *   All other properties of an instruction may be set via the corresponding proxy methods defined in the Builder.
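 * CONSTANT and PARAMETER create constant and parameter instructions via NewConstant and NewParameter, and the
 * INS and BB macros give access to already created instructions and blocks by their IDs.
 * When the GRAPH scope ends, the Builder finalizes the graph: it constructs the control flow and the data flow,
 * runs loop analysis and, unless disabled via EnableGraphChecker(false), verifies the result with GraphChecker.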
 */
class IrConstructor final {
public:
    static const size_t ID_ENTRY_BB = 0;
    static const size_t ID_EXIT_BB = 1U;

    static constexpr DataType::Type MARK_WORD_TYPE = DataType::GetIntTypeBySize(sizeof(MarkWord::markWordSize));

    IrConstructor() : aa_(SpaceType::SPACE_TYPE_COMPILER) {}

    IrConstructor &SetGraph(Graph *graph)
    {
        graph_ = graph;
        if (graph_->GetStartBlock() == nullptr) {
            graph_->CreateStartBlock();
        }
        if (graph_->GetEndBlock() == nullptr) {
            graph_->CreateEndBlock(0U);
        }
        ASSERT(graph_->GetVectorBlocks().size() == 2U);
        bb_map_.clear();
        bb_map_[ID_ENTRY_BB] = graph_->GetStartBlock();
        bb_map_[ID_EXIT_BB] = graph_->GetEndBlock();
        bb_succs_map_.clear();
        inst_map_.clear();
        inst_inputs_map_.clear();
        save_state_inst_vregs_map_.clear();
        phi_inst_inputs_map_.clear();
        return *this;
    }

    template <size_t id>
    IrConstructor &NewBlock()
    {
        ASSERT(id != ID_ENTRY_BB && id != ID_EXIT_BB);
        ASSERT(bb_map_.count(id) == 0);
        ASSERT(CurrentBb() == nullptr);
        auto bb = graph_->GetAllocator()->New<BasicBlock>(graph_);
        bb->SetGuestPc(0U);
#ifdef NDEBUG
        graph_->AddBlock(bb);
#else
        graph_->AddBlock(bb, id);
#endif
        current_bb_ = {id, bb};
        bb_map_[id] = bb;
        // Connect the first custom block with the entry block
        if (bb_succs_map_.empty()) {
            graph_->GetStartBlock()->AddSucc(bb);
        }
        return *this;
    }

    template <typename... Args>
    IrConstructor &NewInst(size_t id, Args &&... args)
    {
        ASSERT_DO(inst_map_.find(id) == inst_map_.end(),
                  std::cerr << "Instruction with same Id " << id << " already exists");
        auto inst = graph_->CreateInst(std::forward<Args>(args)...);
        inst->SetId(id);
        for (size_t i = 0; i < inst->GetInputsCount(); ++i) {
            inst->SetSrcReg(i, INVALID_REG);
        }
        current_inst_ = {id, inst};
        inst_map_[id] = inst;
        auto block = CurrentBb();
        if (block == nullptr) {
            block = graph_->GetStartBlock();
        }
        ASSERT(block);
        if (inst->IsPhi()) {
            block->AppendPhi(inst);
        } else {
            block->AppendInst(inst);
        }
#ifndef NDEBUG
        if (inst->IsLowLevel()) {
            // GraphChecker hack: LowLevel instructions may appear only after the Lowering pass
            graph_->SetLowLevelInstructionsEnabled();
        }
#endif
        if (inst->IsSaveState()) {
            graph_->SetVRegsCount(std::max(graph_->GetVRegsCount(), sizeof...(args)));
        }
        return *this;
    }
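    // Used by the CONSTANT macro. Constants created outside a custom block (or in the start block) are
    // deduplicated through FindOrCreateConstant; otherwise a fresh Constant instruction is appended to the
    // current block.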
    template <typename T>
    IrConstructor &NewConstant(size_t id, [[maybe_unused]] T value)
    {
        ASSERT_DO(inst_map_.find(id) == inst_map_.end(),
                  std::cerr << "Instruction with same Id " << id << " already exists");
        Inst *inst = nullptr;
        auto to_start_bb = (CurrentBbIndex() == 0) || (CurrentBbIndex() == -1);
        if (to_start_bb) {
            inst = graph_->FindOrCreateConstant(value);
        } else {
            inst = graph_->CreateInstConstant(value, graph_->IsBytecodeOptimizer());
            CurrentBb()->AppendInst(inst);
        }
        inst->SetId(id);
        inst_map_[id] = inst;
        current_inst_ = {id, inst};
        return *this;
    }

    IrConstructor &NewParameter(int id, uint16_t arg_number)
    {
        ASSERT_DO(inst_map_.find(id) == inst_map_.end(),
                  std::cerr << "Instruction with same Id " << id << " already exists");
        auto inst = graph_->AddNewParameter(arg_number);
        inst->SetId(id);
        inst_map_[id] = inst;
        current_inst_ = {id, inst};
        return *this;
    }

    IrConstructor &Succs(std::vector<int> succs)
    {
        bb_succs_map_.emplace_back(CurrentBbIndex(), std::move(succs));
        return *this;
    }

    /// Define inputs for the current instruction.
    /// Each input is the index of an input instruction.
    template <typename... Args>
    IrConstructor &Inputs(Args... inputs)
    {
        ASSERT(!CurrentInst()->IsCall() && !CurrentInst()->IsIntrinsic());
        inst_inputs_map_[CurrentInstIndex()].reserve(sizeof...(inputs));
        if constexpr (sizeof...(inputs) != 0) {
            AddInput(inputs...);
        }
        return *this;
    }

    /// Define inputs for the current call-, intrinsic- or phi-instruction.
    /// Each input is defined by an std::pair.
    /// In case of phi: first is the index of a basic block, second is the index of the input instruction.
    /// In case of call and intrinsic: first is the Type of the input, second is the index of the input instruction.
    IrConstructor &Inputs(std::initializer_list<std::pair<int, int>> inputs)
    {
        ASSERT(CurrentInst()->IsPhi() || CurrentInst()->IsCall() || CurrentInst()->IsInitObject() ||
               CurrentInst()->IsIntrinsic());
        if (CurrentInst()->IsPhi()) {
            phi_inst_inputs_map_[CurrentInstIndex()].reserve(inputs.size());
            for (const auto &input : inputs) {
                phi_inst_inputs_map_[CurrentInstIndex()].push_back(input);
            }
        } else {
            auto opc = CurrentInst()->GetOpcode();
            InputTypesMixin *types;
            switch (opc) {
                case Opcode::Intrinsic:
                    types = static_cast<InputTypesMixin *>(CurrentInst()->CastToIntrinsic());
                    break;
                case Opcode::CallIndirect:
                    types = static_cast<InputTypesMixin *>(CurrentInst()->CastToCallIndirect());
                    break;
                case Opcode::Builtin:
                    types = static_cast<InputTypesMixin *>(CurrentInst()->CastToBuiltin());
                    break;
                case Opcode::InitObject:
                    types = static_cast<InputTypesMixin *>(CurrentInst()->CastToInitObject());
                    break;
                default:
                    ASSERT(CurrentInst()->IsCall());
                    types = static_cast<InputTypesMixin *>(static_cast<CallInst *>(CurrentInst()));
                    break;
            }

            inst_inputs_map_[CurrentInstIndex()].reserve(inputs.size());
            types->AllocateInputTypes(graph_->GetAllocator(), inputs.size());
            for (const auto &input : inputs) {
                types->AddInputType(static_cast<DataType::Type>(input.first));
                inst_inputs_map_[CurrentInstIndex()].push_back(input.second);
            }
        }
        return *this;
    }

    /// Same as the default Inputs() method, but defines the inputs' types with respect to call-instructions.
    /// Copies the types of the inputs to the instruction's input_types_.
    template <typename... Args>
    IrConstructor &InputsAutoType(Args... inputs)
    {
        ASSERT(CurrentInst()->IsCall());
        auto *call_inst = static_cast<CallInst *>(CurrentInst());
        call_inst->AllocateInputTypes(graph_->GetAllocator(), sizeof...(inputs));
        ((call_inst->AddInputType(GetInst(inputs).GetType())), ...);
        inst_inputs_map_[CurrentInstIndex()].reserve(sizeof...(inputs));
        ((inst_inputs_map_[CurrentInstIndex()].push_back(inputs)), ...);
        return *this;
    }
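    // Illustrative usage of the three ways to define dataflow above (IDs refer to previously created
    // instructions and blocks in a test graph):
    //   INST(2, Opcode::Add).Inputs(0, 1);                           // plain input IDs
    //   INST(4, Opcode::Phi).Inputs({{2, 0}, {3, 1}});               // {basic block ID, instruction ID}
    //   INST(6, Opcode::Intrinsic).Inputs({{DataType::UINT64, 0}});  // {input type, instruction ID}
    //   INST(5, Opcode::CallStatic).InputsAutoType(0, 1, 3);         // input types taken from the inputs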
    IrConstructor &Pc(uint32_t pc)
    {
        ASSERT(CurrentInst());
        CurrentInst()->SetPc(pc);
        return *this;
    }

    IrConstructor &Volatile(bool volat = true)
    {
        auto inst = CurrentInst();
        switch (inst->GetOpcode()) {
            case Opcode::StoreObject:
                inst->CastToStoreObject()->SetVolatile(volat);
                break;
            case Opcode::LoadObject:
                inst->CastToLoadObject()->SetVolatile(volat);
                break;
            case Opcode::StoreStatic:
                inst->CastToStoreStatic()->SetVolatile(volat);
                break;
            case Opcode::LoadStatic:
                inst->CastToLoadStatic()->SetVolatile(volat);
                break;
            case Opcode::Store:
                inst->CastToStore()->SetVolatile(volat);
                break;
            case Opcode::Load:
                inst->CastToLoad()->SetVolatile(volat);
                break;
            case Opcode::StoreI:
                inst->CastToStoreI()->SetVolatile(volat);
                break;
            case Opcode::LoadI:
                inst->CastToLoadI()->SetVolatile(volat);
                break;
            default:
                UNREACHABLE();
        }
        return *this;
    }

    IrConstructor &IsArray(bool value)
    {
        auto inst = CurrentInst();
        switch (inst->GetOpcode()) {
            case Opcode::LoadArray:
                inst->CastToLoadArray()->SetIsArray(value);
                break;
            case Opcode::LenArray:
                inst->CastToLenArray()->SetIsArray(value);
                break;
            default:
                UNREACHABLE();
        }
        return *this;
    }

    IrConstructor &CC(ConditionCode cc)
    {
        auto inst = CurrentInst();
        switch (inst->GetOpcode()) {
            case Opcode::Compare:
                inst->CastToCompare()->SetCc(cc);
                break;
            case Opcode::If:
                inst->CastToIf()->SetCc(cc);
                break;
            case Opcode::AddOverflow:
                inst->CastToAddOverflow()->SetCc(cc);
                break;
            case Opcode::SubOverflow:
                inst->CastToSubOverflow()->SetCc(cc);
                break;
            case Opcode::IfImm:
                inst->CastToIfImm()->SetCc(cc);
                break;
            case Opcode::Select:
                inst->CastToSelect()->SetCc(cc);
                break;
            case Opcode::SelectImm:
                inst->CastToSelectImm()->SetCc(cc);
                break;
            case Opcode::DeoptimizeCompare:
                inst->CastToDeoptimizeCompare()->SetCc(cc);
                break;
            case Opcode::DeoptimizeCompareImm:
                inst->CastToDeoptimizeCompareImm()->SetCc(cc);
                break;
            default:
                UNREACHABLE();
        }
        return *this;
    }

    IrConstructor &SetFlag(compiler::inst_flags::Flags flag)
    {
        CurrentInst()->SetFlag(flag);
        return *this;
    }

    IrConstructor &ClearFlag(compiler::inst_flags::Flags flag)
    {
        CurrentInst()->ClearFlag(flag);
        return *this;
    }

    IrConstructor &Inlined()
    {
        auto inst = CurrentInst();
        if (inst->GetOpcode() == Opcode::Intrinsic) {
            inst->CastToIntrinsic()->SetInlined(true);
            return *this;
        }
        ASSERT(inst->GetOpcode() == Opcode::CallStatic || inst->GetOpcode() == Opcode::CallVirtual);
        static_cast<CallInst *>(inst)->SetInlined(true);
        inst->SetFlag(inst_flags::NO_DST);
        return *this;
    }
    IrConstructor &Scale([[maybe_unused]] uint64_t scale)
    {
        // NOTE: the scale value is currently not applied to any instruction here.
        [[maybe_unused]] auto inst = CurrentInst();
        return *this;
    }

    IrConstructor &Imm(uint64_t imm)
    {
        auto inst = CurrentInst();
        switch (inst->GetOpcode()) {
            case Opcode::AddI:
            case Opcode::SubI:
            case Opcode::MulI:
            case Opcode::DivI:
            case Opcode::ModI:
            case Opcode::ShlI:
            case Opcode::ShrI:
            case Opcode::AShrI:
            case Opcode::AndI:
            case Opcode::OrI:
            case Opcode::XorI:
                static_cast<BinaryImmOperation *>(inst)->SetImm(imm);
                break;
            case Opcode::BoundsCheckI:
                inst->CastToBoundsCheckI()->SetImm(imm);
                break;
            case Opcode::LoadArrayI:
                inst->CastToLoadArrayI()->SetImm(imm);
                break;
            case Opcode::StoreArrayI:
                inst->CastToStoreArrayI()->SetImm(imm);
                break;
            case Opcode::LoadI:
                inst->CastToLoadI()->SetImm(imm);
                break;
            case Opcode::StoreI:
                inst->CastToStoreI()->SetImm(imm);
                break;
            case Opcode::ReturnI:
                inst->CastToReturnI()->SetImm(imm);
                break;
            case Opcode::IfImm:
                inst->CastToIfImm()->SetImm(imm);
                break;
            case Opcode::SelectImm:
                inst->CastToSelectImm()->SetImm(imm);
                break;
            case Opcode::LoadArrayPairI:
                inst->CastToLoadArrayPairI()->SetImm(imm);
                break;
            case Opcode::StoreArrayPairI:
                inst->CastToStoreArrayPairI()->SetImm(imm);
                break;
            case Opcode::LoadPairPart:
                inst->CastToLoadPairPart()->SetImm(imm);
                break;
            case Opcode::DeoptimizeCompareImm:
                inst->CastToDeoptimizeCompareImm()->SetImm(imm);
                break;
            default:
                UNREACHABLE();
        }
        return *this;
    }

    IrConstructor &Shift(ShiftType shift_type, uint64_t imm)
    {
        auto inst = CurrentInst();
        switch (inst->GetOpcode()) {
            case Opcode::AndSR:
            case Opcode::OrSR:
            case Opcode::XorSR:
            case Opcode::AndNotSR:
            case Opcode::OrNotSR:
            case Opcode::XorNotSR:
            case Opcode::AddSR:
            case Opcode::SubSR:
                static_cast<BinaryShiftedRegisterOperation *>(inst)->SetShiftType(shift_type);
                static_cast<BinaryShiftedRegisterOperation *>(inst)->SetImm(imm);
                break;
            case Opcode::NegSR:
                static_cast<UnaryShiftedRegisterOperation *>(inst)->SetShiftType(shift_type);
                static_cast<UnaryShiftedRegisterOperation *>(inst)->SetImm(imm);
                break;
            default:
                UNREACHABLE();
        }
        return *this;
    }

    IrConstructor &Exit()
    {
        CurrentInst()->CastToMonitor()->SetExit();
        return *this;
    }

    IrConstructor &Entry()
    {
        CurrentInst()->CastToMonitor()->SetEntry();
        return *this;
    }

    IrConstructor &Fcmpg(bool fcmpg)
    {
        CurrentInst()->CastToCmp()->SetFcmpg(fcmpg);
        return *this;
    }
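    // Shorthand type setters: each one assigns the corresponding DataType to the current instruction and
    // returns the builder, so they can be chained right after INST(...).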
    IrConstructor &u8() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::UINT8);
        return *this;
    }
    IrConstructor &u16() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::UINT16);
        return *this;
    }
    IrConstructor &u32() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::UINT32);
        return *this;
    }
    IrConstructor &u64() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::UINT64);
        return *this;
    }
    IrConstructor &s8() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::INT8);
        return *this;
    }
    IrConstructor &s16() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::INT16);
        return *this;
    }
    IrConstructor &s32() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::INT32);
        return *this;
    }
    IrConstructor &s64() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::INT64);
        return *this;
    }
    IrConstructor &i8() // NOLINT(readability-identifier-naming)
    {
        return s8();
    }
    IrConstructor &i16() // NOLINT(readability-identifier-naming)
    {
        return s16();
    }
    IrConstructor &i32() // NOLINT(readability-identifier-naming)
    {
        return s32();
    }
    IrConstructor &i64() // NOLINT(readability-identifier-naming)
    {
        return s64();
    }
    IrConstructor &b() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::BOOL);
        return *this;
    }
    IrConstructor &ref() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::REFERENCE);
        return *this;
    }
    IrConstructor &ptr() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::POINTER);
        return *this;
    }
    IrConstructor &w() // NOLINT(readability-identifier-naming)
    {
        return ptr();
    }
    // Type representing MarkWord
    IrConstructor &mw() // NOLINT(readability-identifier-naming)
    {
        return type(MARK_WORD_TYPE);
    }
    IrConstructor &f32() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::FLOAT32);
        return *this;
    }
    IrConstructor &f64() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::FLOAT64);
        return *this;
    }
    IrConstructor &any() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::ANY);
        return *this;
    }
    IrConstructor &SetType(DataType::Type type) // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(type);
        return *this;
    }
    IrConstructor &AnyType(AnyBaseType any_type)
    {
        auto *atm = static_cast<AnyTypeMixin<FixedInputsInst1> *>(CurrentInst());
        atm->SetAnyType(any_type);
        return *this;
    }
    IrConstructor &v0id() // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(DataType::VOID);
        return *this;
    }
    IrConstructor &type(DataType::Type type) // NOLINT(readability-identifier-naming)
    {
        CurrentInst()->SetType(type);
        return *this;
    }

    IrConstructor &Terminator()
    {
        CurrentInst()->SetFlag(inst_flags::TERMINATOR);
        return *this;
    }

    IrConstructor &AddImm(uint32_t imm)
    {
        CurrentInst()->CastToIntrinsic()->AddImm(graph_->GetAllocator(), imm);
        return *this;
    }

    IrConstructor &DstReg(uint8_t reg)
    {
        CurrentInst()->SetDstReg(reg);
        if (DataType::IsFloatType(CurrentInst()->GetType())) {
            graph_->SetUsedReg<DataType::FLOAT64>(reg);
        }
        return *this;
    }

    IrConstructor &SrcReg(uint8_t id, uint8_t reg)
    {
        CurrentInst()->SetSrcReg(id, reg);
        if (DataType::IsFloatType(CurrentInst()->GetType())) {
            graph_->SetUsedReg<DataType::FLOAT64>(reg);
        }
        graph_->SetUsedReg<DataType::INT64>(reg);
        return *this;
    }
    IrConstructor &TypeId(uint32_t type_id)
    {
        auto inst = CurrentInst();
        switch (inst->GetOpcode()) {
            case Opcode::Call:
                inst->CastToCall()->SetCallMethodId(type_id);
                break;
            case Opcode::LoadString:
                inst->CastToLoadString()->SetTypeId(type_id);
                break;
            case Opcode::LoadType:
                inst->CastToLoadType()->SetTypeId(type_id);
                break;
            case Opcode::UnresolvedLoadType:
                inst->CastToUnresolvedLoadType()->SetTypeId(type_id);
                break;
            case Opcode::StoreStatic:
                inst->CastToStoreStatic()->SetTypeId(type_id);
                break;
            case Opcode::UnresolvedStoreStatic:
                inst->CastToUnresolvedStoreStatic()->SetTypeId(type_id);
                break;
            case Opcode::LoadStatic:
                inst->CastToLoadStatic()->SetTypeId(type_id);
                break;
            case Opcode::UnresolvedLoadStatic:
                inst->CastToUnresolvedLoadStatic()->SetTypeId(type_id);
                break;
            case Opcode::LoadObject:
                inst->CastToLoadObject()->SetTypeId(type_id);
                break;
            case Opcode::UnresolvedLoadObject:
                inst->CastToUnresolvedLoadObject()->SetTypeId(type_id);
                break;
            case Opcode::StoreObject:
                inst->CastToStoreObject()->SetTypeId(type_id);
                break;
            case Opcode::UnresolvedStoreObject:
                inst->CastToUnresolvedStoreObject()->SetTypeId(type_id);
                break;
            case Opcode::NewObject:
                inst->CastToNewObject()->SetTypeId(type_id);
                break;
            case Opcode::InitObject:
                inst->CastToInitObject()->SetCallMethodId(type_id);
                break;
            case Opcode::NewArray:
                inst->CastToNewArray()->SetTypeId(type_id);
                break;
            case Opcode::CheckCast:
                inst->CastToCheckCast()->SetTypeId(type_id);
                break;
            case Opcode::IsInstance:
                inst->CastToIsInstance()->SetTypeId(type_id);
                break;
            case Opcode::InitClass:
                inst->CastToInitClass()->SetTypeId(type_id);
                break;
            case Opcode::LoadClass:
                inst->CastToLoadClass()->SetTypeId(type_id);
                break;
            case Opcode::LoadAndInitClass:
                inst->CastToLoadAndInitClass()->SetTypeId(type_id);
                break;
            case Opcode::UnresolvedLoadAndInitClass:
                inst->CastToUnresolvedLoadAndInitClass()->SetTypeId(type_id);
                break;
            default:
                UNREACHABLE();
        }
        return *this;
    }

    IrConstructor &ObjField([[maybe_unused]] RuntimeInterface::FieldPtr field)
    {
        // Placeholder: the field is not attached to the current instruction here.
        return *this;
    }

    IrConstructor &SetNeedBarrier(bool need_barrier)
    {
        auto inst = CurrentInst();
        switch (inst->GetOpcode()) {
            case Opcode::Store:
                inst->CastToStore()->SetNeedBarrier(need_barrier);
                break;
            case Opcode::StoreI:
                inst->CastToStoreI()->SetNeedBarrier(need_barrier);
                break;
            case Opcode::StoreObject:
                inst->CastToStoreObject()->SetNeedBarrier(need_barrier);
                break;
            case Opcode::StoreArray:
                inst->CastToStoreArray()->SetNeedBarrier(need_barrier);
                break;
            case Opcode::StoreArrayI:
                inst->CastToStoreArrayI()->SetNeedBarrier(need_barrier);
                break;
            case Opcode::StoreArrayPair:
                inst->CastToStoreArrayPair()->SetNeedBarrier(need_barrier);
                break;
            case Opcode::StoreArrayPairI:
                inst->CastToStoreArrayPairI()->SetNeedBarrier(need_barrier);
                break;
            default:
                UNREACHABLE();
        }
        return *this;
    }
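    // Virtual registers for a SaveState instruction: the i-th vreg is paired with the i-th input passed to
    // Inputs() when the data flow is constructed.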
    IrConstructor &SrcVregs(std::vector<int> &&vregs)
    {
        ASSERT(CurrentInst()->IsSaveState());
        if (!vregs.empty()) {
            graph_->SetVRegsCount(
                std::max<size_t>(graph_->GetVRegsCount(), *std::max_element(vregs.begin(), vregs.end())));
        }
        if (save_state_inst_vregs_map_.count(CurrentInstIndex()) == 0) {
            save_state_inst_vregs_map_.emplace(CurrentInstIndex(), std::move(vregs));
        }
        return *this;
    }

    IrConstructor &NoVregs()
    {
        ASSERT(CurrentInst()->IsSaveState());
        return *this;
    }

    IrConstructor &CatchTypeIds(std::vector<uint16_t> &&ids)
    {
        auto inst = CurrentInst();
        ASSERT(inst->GetOpcode() == Opcode::Try);
        auto try_inst = inst->CastToTry();
        for (auto id : ids) {
            try_inst->AppendCatchTypeId(id, 0);
        }
        return *this;
    }

    IrConstructor &ThrowableInsts(std::vector<int> &&ids)
    {
        auto inst = CurrentInst();
        ASSERT(inst->GetOpcode() == Opcode::CatchPhi);
        auto catch_phi = inst->CastToCatchPhi();
        for (auto id : ids) {
            ASSERT(inst_map_.count(id) > 0);
            catch_phi->AppendThrowableInst(inst_map_.at(id));
        }
        return *this;
    }

    IrConstructor &DeoptimizeType(DeoptimizeType type)
    {
        auto inst = CurrentInst();
        if (inst->GetOpcode() == Opcode::Deoptimize) {
            inst->CastToDeoptimize()->SetDeoptimizeType(type);
        } else {
            ASSERT(inst->GetOpcode() == Opcode::DeoptimizeIf);
            inst->CastToDeoptimizeIf()->SetDeoptimizeType(type);
        }
        return *this;
    }

    IrConstructor &SrcType(DataType::Type type)
    {
        auto inst = CurrentInst();
        switch (inst->GetOpcode()) {
            case Opcode::Cmp:
                inst->CastToCmp()->SetOperandsType(type);
                break;
            case Opcode::Compare:
                inst->CastToCompare()->SetOperandsType(type);
                break;
            case Opcode::If:
                inst->CastToIf()->SetOperandsType(type);
                break;
            case Opcode::IfImm:
                inst->CastToIfImm()->SetOperandsType(type);
                break;
            default:
                UNREACHABLE();
        }
        return *this;
    }

    IrConstructor &IntrinsicId(RuntimeInterface::IntrinsicId id)
    {
        auto inst = CurrentInst();
        ASSERT(inst->IsIntrinsic());
        inst->CastToIntrinsic()->SetIntrinsicId(id);
        AdjustFlags(id, inst);
        return *this;
    }

    IrConstructor &Relocate()
    {
        auto inst = CurrentInst();
        ASSERT(inst->IsIntrinsic());
        inst->CastToIntrinsic()->SetRelocate();
        return *this;
    }

    IrConstructor &Class(RuntimeInterface::ClassPtr klass)
    {
        auto inst = CurrentInst();
        switch (inst->GetOpcode()) {
            case Opcode::InitClass:
                inst->CastToInitClass()->SetClass(klass);
                break;
            case Opcode::LoadClass:
                inst->CastToLoadClass()->SetClass(klass);
                break;
            case Opcode::LoadAndInitClass:
                inst->CastToLoadAndInitClass()->SetClass(klass);
                break;
            case Opcode::UnresolvedLoadAndInitClass:
                inst->CastToUnresolvedLoadAndInitClass()->SetClass(klass);
                break;
            default:
                UNREACHABLE();
        }
        return *this;
    }
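    // RAII helper used by the GRAPH/BASIC_BLOCK/INST macros: the returned shared_ptr does not own the builder;
    // its deleter only resets the current block or instruction, or finalizes the graph, when the scope ends.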
    template <typename T>
    std::shared_ptr<IrConstructor> ScopedLife()
    {
#ifndef __clang_analyzer__
        if constexpr (std::is_same_v<T, BasicBlock>) {
            return std::shared_ptr<IrConstructor>(this, [](IrConstructor *b) { b->ResetCurrentBb(); });
        } else if constexpr (std::is_same_v<T, Inst>) {
            return std::shared_ptr<IrConstructor>(this, [](IrConstructor *b) { b->ResetCurrentInst(); });
        } else if constexpr (std::is_same_v<T, Graph>) {
            return std::shared_ptr<IrConstructor>(this, [](IrConstructor *b) { b->Finalize(); });
        }
#else
        return nullptr;
#endif
    }

    void CheckInputType(Inst *inst, Inst *input_inst, size_t input_idx)
    {
        auto type = input_inst->GetType();
        auto prev_type = inst->GetInputType(input_idx);
        if (prev_type == DataType::Type::NO_TYPE) {
            switch (inst->GetOpcode()) {
                case Opcode::Cmp:
                    inst->CastToCmp()->SetOperandsType(type);
                    break;
                case Opcode::Compare:
                    inst->CastToCompare()->SetOperandsType(type);
                    break;
                case Opcode::If:
                    inst->CastToIf()->SetOperandsType(type);
                    break;
                case Opcode::IfImm:
                    inst->CastToIfImm()->SetOperandsType(type);
                    break;
                case Opcode::Select:
                    inst->CastToSelect()->SetOperandsType(type);
                    break;
                case Opcode::SelectImm:
                    inst->CastToSelectImm()->SetOperandsType(type);
                    break;
                default:
                    UNREACHABLE();
            }
        } else {
            CHECK_EQ(type, prev_type);
        }
    }

    void ConstructControlFlow()
    {
        for (auto [bbi, succs] : bb_succs_map_) {
            auto bb = bb_map_.at(bbi);
            for (auto succ : succs) {
                bb->AddSucc(bb_map_.at(succ == -1 ? 1 : succ));
            }
            if (succs.size() > 1 && bb->IsEmpty()) {
                bb->SetTryEnd(true);
            }
        }
        auto end_block = graph_->GetEndBlock();
        if (end_block->GetPredsBlocks().empty()) {
            graph_->EraseBlock(end_block);
            graph_->SetEndBlock(nullptr);
        }
    }

    void ConstructDataFlow()
    {
        for (auto [insti, inputs] : inst_inputs_map_) {
            auto inst = inst_map_.at(insti);
            const auto &vregs {inst->IsSaveState() ? save_state_inst_vregs_map_[insti] : std::vector<int> {}};
            ASSERT(!inst->IsSaveState() || inputs.size() == vregs.size());
            size_t idx = 0;
            if (inst->IsOperandsDynamic()) {
                inst->ReserveInputs(inputs.size());
            }
            for (auto input_idx : inputs) {
                ASSERT_DO(inst_map_.find(input_idx) != inst_map_.end(),
                          std::cerr << "Input with Id " << input_idx << " isn't found, inst: " << *inst << std::endl);
                auto input_inst = inst_map_.at(input_idx);
                auto op = inst->GetOpcode();
                if (!input_inst->IsConst() &&
                    (op == Opcode::Cmp || op == Opcode::Compare || op == Opcode::If || op == Opcode::IfImm ||
                     op == Opcode::Select || op == Opcode::SelectImm)) {
                    CheckInputType(inst, input_inst, idx);
                }
                if (inst->IsOperandsDynamic()) {
                    inst->AppendInput(input_inst);
                    if (inst->IsSaveState()) {
                        static_cast<SaveStateInst *>(inst)->SetVirtualRegister(idx, VirtualRegister(vregs[idx], false));
                    }
                } else {
                    inst->SetInput(idx, input_inst);
                }
                ++idx;
            }
        }

        for (auto [insti, inputs] : phi_inst_inputs_map_) {
            auto inst = inst_map_.at(insti);
            for (auto input : inputs) {
                auto input_inst = inst_map_.at(input.second);
                size_t idx = inst->GetBasicBlock()->GetPredBlockIndex(bb_map_.at(input.first));
                auto i {inst->AppendInput(input_inst)};
                inst->CastToPhi()->SetPhiInputBbNum(i, idx);
            }
        }
    }
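    // Derive flags that the test did not set explicitly: write barriers for reference stores, try/catch and
    // OSR-entry block marks, and the next free instruction id of the graph.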
    void UpdateSpecialFlags()
    {
        int max_id = graph_->GetCurrentInstructionId();
        for (auto pair : inst_map_) {
            auto id = pair.first;
            auto inst = pair.second;
            if (inst->GetType() == DataType::REFERENCE) {
                if (inst->GetOpcode() == Opcode::StoreArray) {
                    inst->CastToStoreArray()->SetNeedBarrier(true);
                }
                if (inst->GetOpcode() == Opcode::StoreArrayI) {
                    inst->CastToStoreArrayI()->SetNeedBarrier(true);
                }
                if (inst->GetOpcode() == Opcode::StoreStatic) {
                    inst->CastToStoreStatic()->SetNeedBarrier(true);
                }
                if (inst->GetOpcode() == Opcode::UnresolvedStoreStatic) {
                    inst->CastToUnresolvedStoreStatic()->SetNeedBarrier(true);
                }
                if (inst->GetOpcode() == Opcode::StoreObject) {
                    inst->CastToStoreObject()->SetNeedBarrier(true);
                }
                if (inst->GetOpcode() == Opcode::UnresolvedStoreObject) {
                    inst->CastToUnresolvedStoreObject()->SetNeedBarrier(true);
                }
                if (inst->GetOpcode() == Opcode::StoreArrayPair) {
                    inst->CastToStoreArrayPair()->SetNeedBarrier(true);
                }
                if (inst->GetOpcode() == Opcode::StoreArrayPairI) {
                    inst->CastToStoreArrayPairI()->SetNeedBarrier(true);
                }
            }
            if (inst->GetOpcode() == Opcode::Try) {
                auto bb = inst->GetBasicBlock();
                bb->SetTryBegin(true);
                bb->GetSuccessor(0)->SetTry(true);
                for (size_t idx = 1; idx < bb->GetSuccsBlocks().size(); idx++) {
                    bb->GetSuccessor(idx)->SetCatchBegin(true);
                }
            }
            if (inst->GetOpcode() == Opcode::SaveStateOsr) {
                inst->GetBasicBlock()->SetOsrEntry(true);
            }
            if (id >= max_id) {
                max_id = id + 1;
            }
        }
        graph_->SetCurrentInstructionId(max_id);
    }

    // Create SaveState instructions that weren't explicitly constructed in the test
    void CreateSaveStates()
    {
        for (auto [insti, inputs] : inst_inputs_map_) {
            auto inst = inst_map_.at(insti);
            if (!inst->IsOperandsDynamic() && inst->RequireState() && inst->GetInputsCount() > inputs.size()) {
                auto save_state = graph_->CreateInstSaveState();
                save_state->SetId(static_cast<int>(graph_->GetCurrentInstructionId()) + 1);
                graph_->SetCurrentInstructionId(save_state->GetId() + 1);
                inst->GetBasicBlock()->InsertBefore(save_state, inst);
                inst->SetSaveState(save_state);
            }
        }
    }

    void SetSpillFillData()
    {
        graph_->ResetParameterInfo();
        // Count the number of parameters (needed for the bytecode optimizer) in the first loop and set
        // SpillFillData for each parameter in the second loop
        uint32_t num_args = 0;
        for (auto inst : graph_->GetStartBlock()->Insts()) {
            if (inst->GetOpcode() == Opcode::Parameter) {
                ++num_args;
            }
        }
        uint32_t i = 0;
        for (auto inst : graph_->GetStartBlock()->Insts()) {
            if (inst->GetOpcode() != Opcode::Parameter) {
                continue;
            }
            ++i;

            auto type = inst->GetType();
            InstBuilder::SetParamSpillFill(graph_, static_cast<ParameterInst *>(inst), num_args, i - 1, type);
        }
    }

    void Finalize()
    {
        ConstructControlFlow();
        ConstructDataFlow();
        UpdateSpecialFlags();
        CreateSaveStates();
        SetSpillFillData();
        ResetCurrentBb();
        ResetCurrentInst();
        graph_->RunPass<LoopAnalyzer>();
        PropagateRegisters();
        if (enable_graph_checker_) {
            GraphChecker(graph_).Check();
        }
    }

    Inst &GetInst(unsigned index)
    {
        return *inst_map_.at(index);
    }

    BasicBlock &GetBlock(unsigned index)
    {
        return *bb_map_.at(index);
    }

    void EnableGraphChecker(bool value)
    {
        enable_graph_checker_ = value;
    }

private:
    void AddInput(int v)
    {
        inst_inputs_map_[CurrentInstIndex()].push_back(v);
    }

    template <typename T, typename... Args>
    void AddInput(T v, Args... args)
    {
        inst_inputs_map_[CurrentInstIndex()].push_back(v);
        AddInput(args...);
    }

    BasicBlock *GetBbByIndex(int index)
    {
        return bb_map_.at(index);
    }

    BasicBlock *CurrentBb()
    {
        return current_bb_.second;
    }

    int CurrentBbIndex()
    {
        return current_bb_.first;
    }

    Inst *CurrentInst()
    {
        return current_inst_.second;
    }

    int CurrentInstIndex()
    {
        return current_inst_.first;
    }

    void ResetCurrentBb()
    {
        current_bb_ = {-1, nullptr};
    }

    void ResetCurrentInst()
    {
        current_inst_ = {-1, nullptr};
    }

    void PropagateRegisters()
    {
        for (auto bb : graph_->GetBlocksRPO()) {
            for (auto inst : bb->AllInsts()) {
                if (inst->GetDstReg() != INVALID_REG && !inst->IsOperandsDynamic()) {
                    for (size_t i = 0; i < inst->GetInputsCount(); i++) {
                        inst->SetSrcReg(i, inst->GetInputs()[i].GetInst()->GetDstReg());
                    }
                }
            }
        }
    }

private:
    Graph *graph_ {nullptr};
    ArenaAllocator aa_;
    std::pair<int, BasicBlock *> current_bb_;
    std::pair<int, Inst *> current_inst_;
    ArenaUnorderedMap<int, BasicBlock *> bb_map_ {aa_.Adapter()};
    ArenaVector<std::pair<int, std::vector<int>>> bb_succs_map_ {aa_.Adapter()};
    ArenaUnorderedMap<int, Inst *> inst_map_ {aa_.Adapter()};
    ArenaUnorderedMap<int, std::vector<int>> inst_inputs_map_ {aa_.Adapter()};
    ArenaUnorderedMap<int, std::vector<int>> save_state_inst_vregs_map_ {aa_.Adapter()};
    ArenaUnorderedMap<int, std::vector<std::pair<int, int>>> phi_inst_inputs_map_ {aa_.Adapter()};
    bool enable_graph_checker_ {true};
};

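// The macros below assume that the enclosing scope (typically a test fixture) provides `builder_`,
// a pointer to an IrConstructor instance; each macro simply forwards to it.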
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define GRAPH(GRAPH) if (auto __g = builder_->SetGraph(GRAPH).ScopedLife<Graph>(); true)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define BASIC_BLOCK(ID, ...) \
    if (auto __b = builder_->NewBlock<ID>().Succs({__VA_ARGS__}).ScopedLife<BasicBlock>(); true)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define INST(ID, ...) builder_->NewInst(ID, __VA_ARGS__)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define CONSTANT(ID, VALUE) builder_->NewConstant(ID, VALUE)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define PARAMETER(ID, ARG_NUM) builder_->NewParameter(ID, ARG_NUM)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define INS(INDEX) builder_->GetInst(INDEX)
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define BB(INDEX) builder_->GetBlock(INDEX)
}  // namespace panda::compiler

#endif  // COMPILER_OPTIMIZER_IR_IR_CONSTRUCTOR_H