/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H_
#define COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H_

/*
Codegen interface for compiler
! Do not use this file in runtime
*/

#include "code_info/code_info_builder.h"
#include "compiler_logger.h"
#include "disassembly.h"
#include "frame_info.h"
#include "optimizer/analysis/live_registers.h"
#include "optimizer/code_generator/callconv.h"
#include "optimizer/code_generator/encode.h"
#include "optimizer/code_generator/registers_description.h"
#include "optimizer/code_generator/slow_path.h"
#include "optimizer/code_generator/spill_fill_encoder.h"
#include "optimizer/code_generator/target_info.h"
#include "optimizer/ir/analysis.h"
#include "optimizer/ir/graph.h"
#include "optimizer/ir/graph_visitor.h"
#include "optimizer/optimizations/regalloc/spill_fills_resolver.h"
#include "optimizer/pass_manager.h"
#include "utils/cframe_layout.h"

namespace panda::compiler {
// Maximum size in bytes
constexpr size_t INST_IN_SLOW_PATH = 64;

class Encoder;
class CodeBuilder;
class OsrEntryStub;

class Codegen : public Optimization {
    using EntrypointId = RuntimeInterface::EntrypointId;

public:
    explicit Codegen(Graph *graph);
    NO_MOVE_SEMANTIC(Codegen);
    NO_COPY_SEMANTIC(Codegen);

    ~Codegen() override = default;

    bool RunImpl() override;
    const char *GetPassName() const override;
    bool AbortIfFailed() const override;

    static bool Run(Graph *graph);

    ArenaAllocator *GetAllocator() const
    {
        return allocator_;
    }
    ArenaAllocator *GetLocalAllocator() const
    {
        return local_allocator_;
    }
    FrameInfo *GetFrameInfo() const
    {
        return frame_info_;
    }
    void SetFrameInfo(FrameInfo *frame_info)
    {
        frame_info_ = frame_info;
    }
    virtual void CreateFrameInfo();

    RuntimeInterface *GetRuntime() const
    {
        return runtime_;
    }
    RegistersDescription *GetRegfile() const
    {
        return regfile_;
    }
    Encoder *GetEncoder() const
    {
        return enc_;
    }
    CallingConvention *GetCallingConvention() const
    {
        return callconv_;
    }

    GraphVisitor *GetGraphVisitor() const
    {
        return visitor_;
    }

    LabelHolder::LabelId GetLabelEntry() const
    {
        return label_entry_;
    }

    LabelHolder::LabelId GetLabelExit() const
    {
        return label_exit_;
    }

    RuntimeInterface::MethodId GetMethodId()
    {
        return method_id_;
    }

    void SetStartCodeOffset(size_t offset)
    {
        start_code_offset_ = offset;
    }

    size_t GetStartCodeOffset() const
    {
        return start_code_offset_;
    }

    size_t GetLanguageExtensionOffsetFromSpInBytes();
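    // The Convert* helpers below translate IR-level entities (Register, ConstantInst,
    // ConditionCode, DataType::Type) into encoder-level ones (Reg, Imm, Condition,
    // TypeInfo). A minimal usage sketch, with a hypothetical instruction for
    // illustration only:
    //
    //   Reg dst = ConvertRegister(inst->GetDstReg(), inst->GetType());
    //   GetEncoder()->EncodeMov(dst, ConvertImm(1U, DataType::INT32));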
    void Convert(ArenaVector<Reg> *regs_usage, const ArenaVector<bool> *mask, TypeInfo type_info);

    Reg ConvertRegister(Register ref, DataType::Type type = DataType::Type::INT64);

    Imm ConvertImm(uint64_t imm, DataType::Type type);

    Imm ConvertImmWithExtend(uint64_t imm, DataType::Type type);

    Imm ConvertImm(ConstantInst *const_inst, DataType::Type type);

    Condition ConvertCc(ConditionCode cc);
    Condition ConvertCcOverflow(ConditionCode cc);

    static inline TypeInfo ConvertDataType(DataType::Type type, Arch arch)
    {
        return TypeInfo::FromDataType(type, arch);
    }

    Arch GetArch() const
    {
        return GetTarget().GetArch();
    }

    Target GetTarget() const
    {
        return target_;
    }

    TypeInfo GetPtrRegType() const
    {
        return target_.GetPtrRegType();
    }

    CodeInfoBuilder *GetCodeBuilder() const
    {
        return code_builder_;
    }

    void CreateStackMap(Inst *inst, Inst *user = nullptr);

    void CreateStackMapRec(SaveStateInst *save_state, bool require_vreg_map, Inst *target_site);
    void CreateVRegMap(SaveStateInst *save_state, size_t vregs_count, Inst *target_site);
    void CreateVreg(const Location &location, Inst *inst, const VirtualRegister &vreg);
    void FillVregIndices(SaveStateInst *save_state);

    void CreateOsrEntry(SaveStateInst *save_state);

    void CreateVRegForRegister(const Location &location, Inst *inst, const VirtualRegister &vreg);

    /**
     * If 'live_inputs' is set, inst's source registers are also added to the mask.
     */
    template <bool live_inputs = false>
    std::pair<RegMask, VRegMask> GetLiveRegisters(Inst *inst)
    {
        RegMask live_regs;
        VRegMask live_fp_regs;
        if (!options.IsCompilerSaveOnlyLiveRegisters() || inst == nullptr) {
            live_regs.set();
            live_fp_regs.set();
            return {live_regs, live_fp_regs};
        }
        // Run LiveRegisters pass only if it is actually required
        if (!GetGraph()->IsAnalysisValid<LiveRegisters>()) {
            GetGraph()->RunPass<LiveRegisters>();
        }

        // Add registers from intervals that are live at inst's definition
        auto &lr = GetGraph()->GetAnalysis<LiveRegisters>();
        lr.VisitIntervalsWithLiveRegisters<live_inputs>(inst, [&live_regs, &live_fp_regs, this](const auto &li) {
            auto reg = ConvertRegister(li->GetReg(), li->GetType());
            GetEncoder()->SetRegister(&live_regs, &live_fp_regs, reg);
        });

        // Add live temp registers
        live_regs |= GetEncoder()->GetLiveTmpRegMask();
        live_fp_regs |= GetEncoder()->GetLiveTmpFpRegMask();

        return {live_regs, live_fp_regs};
    }

    // Limits the live register set to the registers used to pass parameters to the runtime call:
    // 1) these are saved/restored by the caller
    // 2) the remaining ones are saved/restored by the bridge function (aarch only)
    void FillOnlyParameters(RegMask *live_regs, uint32_t num_params) const;
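    // CreateSlowPath allocates a slow path of type T bound to 'inst', attaches a fresh
    // label to it and queues it for emission in EmitSlowPaths(). A minimal sketch,
    // assuming a SlowPathBase subclass and an entrypoint id (both hypothetical here):
    //
    //   auto *slow_path = CreateSlowPath<SlowPathEntrypoint>(inst, EntrypointId::SOME_ENTRYPOINT);
    //   // ... branch to the slow path's label on the exceptional condition ...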
    template <typename T, typename... Args>
    T *CreateSlowPath(Inst *inst, Args &&... args)
    {
        static_assert(std::is_base_of_v<SlowPathBase, T>);
        auto label = GetEncoder()->CreateLabel();
        auto slow_path = GetLocalAllocator()->New<T>(label, inst, std::forward<Args>(args)...);
        slow_paths_.push_back(slow_path);
        return slow_path;
    }

    void EmitSlowPaths();

    void InsertTrace(std::initializer_list<std::variant<Reg, Imm>> params);

    void CallIntrinsic(Inst *inst, RuntimeInterface::IntrinsicId id);

    // The function is used for calling runtime functions through special bridges.
    // !NOTE Don't use this function for calling runtime without bridges (it saves only parameters on the stack).
    void CallRuntime(Inst *inst, EntrypointId id, Reg dst_reg, std::initializer_list<std::variant<Reg, Imm>> params,
                     RegMask preserved_regs = {});

    template <typename... Args>
    void CallRuntimeWithMethod(Inst *inst, void *method, EntrypointId eid, Reg dst_reg, Args &&... params)
    {
        if (GetGraph()->IsAotMode()) {
            ScopedTmpReg method_reg(GetEncoder());
            LoadMethod(method_reg);
            CallRuntime(inst, eid, dst_reg, {method_reg, params...});
        } else {
            if (Is64BitsArch(GetArch())) {
                CallRuntime(inst, eid, dst_reg, {Imm(reinterpret_cast<uint64_t>(method)), params...});
            } else {
                // uintptr_t causes problems on host cross-jit compilation
                CallRuntime(inst, eid, dst_reg, {Imm(down_cast<uint32_t>(method)), params...});
            }
        }
    }
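    // Note: in AOT mode the method is rematerialized through LoadMethod() into a scoped
    // temporary register, while in JIT mode its raw pointer is embedded as an immediate
    // (truncated to 32 bits on 32-bit targets, since uintptr_t misbehaves in host cross-jit).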
    void SaveRegistersForImplicitRuntime(Inst *inst, RegMask *params_mask, RegMask *mask);

    void VisitNewArray(Inst *inst);

    void LoadClassFromObject(Reg class_reg, Reg obj_reg);
    void CreateCall(CallInst *call_inst);
    void VisitCallIndirect(CallIndirectInst *inst);
    void VisitCall(CallInst *inst);
    void CreateUnresolvedVirtualMethodLoad(CallInst *vcall, Reg method);
    void CreateVirtualCall(CallInst *call_inst);
    void CreateDynamicCall(CallInst *call_inst);
    void CreateCallIntrinsic(IntrinsicInst *inst);
    void CreateMultiArrayCall(CallInst *call_inst);
    void CreateNewObjCall(NewObjectInst *new_obj);
    void CreateNewObjCallOld(NewObjectInst *new_obj);
    void CreateMonitorCall(MonitorInst *inst);
    void CreateMonitorCallOld(MonitorInst *inst);
    void CreateCheckCastInterfaceCall(Inst *inst);
    void CreateNonDefaultInitClass(ClassInst *init_inst);
    void CreatePreWRB(Inst *inst, MemRef mem, bool store_pair = false);
    void CreatePostWRB(Inst *inst, MemRef mem, Reg reg1, Reg reg2 = INVALID_REGISTER);
    void EncodePostWRB(Inst *inst, MemRef mem, Reg reg1, Reg reg2, bool check_nullptr = true);
    void CreatePostInterRegionBarrier(Inst *inst, MemRef mem, Reg reg1, Reg reg2, bool check_nullptr);
    void CreatePostInterGenerationalBarrier(MemRef mem);
    void CallBarrier(RegMask live_regs, VRegMask live_vregs, EntrypointId id,
                     const std::initializer_list<std::variant<Reg, Imm>> &params);
    void CreateLoadClassFromPLT(Inst *inst, Reg tmp_reg, Reg dst, size_t class_id);
    void CreateJumpToClassResolverPltShared(Inst *inst, Reg tmp_reg, RuntimeInterface::EntrypointId id);
    void CreateLoadTLABInformation(Reg reg_tlab_start, Reg reg_tlab_size);
    void CreateCheckForTLABWithConstSize(Inst *inst, Reg reg_tlab_start, Reg reg_tlab_size, size_t size,
                                         LabelHolder::LabelId label);
    void CreateDebugRuntimeCallsForNewObject(Inst *inst, Reg reg_tlab_start, size_t alloc_size, RegMask preserved);
    void CreateDebugRuntimeCallsForCreateString(Inst *inst, Reg dst);
    void CreateReturn(const Inst *inst);

    // Aligns up the value in alignment_reg, using tmp_reg as a scratch register.
    void CreateAlignmentValue(Reg alignment_reg, Reg tmp_reg, size_t alignment);
    void TryInsertImplicitNullCheck(Inst *inst, size_t prevOffset);

    const CFrameLayout &GetFrameLayout() const
    {
        return frame_layout_;
    }

    bool RegisterKeepCallArgument(CallInst *call_inst, Reg reg);

    void LoadMethod(Reg dst);
    void LoadFreeSlot(Reg dst);
    void StoreFreeSlot(Reg src);

    ssize_t GetStackOffset(Location location)
    {
        if (location.GetKind() == LocationType::STACK_ARGUMENT) {
            return location.GetValue() * GetFrameLayout().GetSlotSize();
        }

        if (location.GetKind() == LocationType::STACK_PARAMETER) {
            return GetFrameLayout().GetFrameSize<CFrameLayout::BYTES>() +
                   (location.GetValue() * GetFrameLayout().GetSlotSize());
        }

        ASSERT(location.GetKind() == LocationType::STACK);
        return GetFrameLayout().GetSpillOffsetFromSpInBytes(location.GetValue());
    }

    MemRef GetMemRefForSlot(Location location)
    {
        ASSERT(location.IsAnyStack());
        return MemRef(SpReg(), GetStackOffset(location));
    }
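    // GetStackOffset resolves a Location to an SP-relative byte offset: outgoing
    // STACK_ARGUMENT slots sit at the bottom of the frame, incoming STACK_PARAMETER
    // slots just above the frame (frame size + slot offset), and STACK spill slots
    // inside the CFrameLayout spill area. For example, with an 8-byte slot size
    // (an assumption that holds for 64-bit targets), STACK_ARGUMENT #2 resolves to
    // SP + 16, so GetMemRefForSlot yields MemRef(SpReg(), 16).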
    Reg SpReg() const
    {
        return GetTarget().GetStackReg();
    }

    Reg FpReg() const
    {
        return GetTarget().GetFrameReg();
    }

    bool HasLiveCallerSavedRegs(Inst *inst);
    void SaveCallerRegisters(RegMask live_regs, VRegMask live_vregs, bool adjust_regs);
    void LoadCallerRegisters(RegMask live_regs, VRegMask live_vregs, bool adjust_regs);

    // Initialize internal variables
    void Initialize();

    const Disassembly *GetDisasm() const
    {
        return &disasm_;
    }

    Disassembly *GetDisasm()
    {
        return &disasm_;
    }

    void AddLiveOut(const BasicBlock *bb, const Register reg)
    {
        live_outs_[bb].Set(reg);
    }

    RegMask GetLiveOut(const BasicBlock *bb) const
    {
        auto it = live_outs_.find(bb);
        return it != live_outs_.end() ? it->second : RegMask();
    }

    Reg ThreadReg() const
    {
        return Reg(GetThreadReg(GetArch()), GetTarget().GetPtrRegType());
    }

    static bool InstEncodedWithLibCall(const Inst *inst, Arch arch);

protected:
    virtual void GeneratePrologue();
    virtual void GenerateEpilogue();

    // Main logic steps
    bool BeginMethod();
    bool VisitGraph();
    void EndMethod();
    bool CopyToCodeCache();
    void DumpCode();

    RegMask GetUsedRegs() const
    {
        return used_regs_;
    }
    RegMask GetUsedVRegs() const
    {
        return used_vregs_;
    }

    void FillCallParams(const std::initializer_list<std::variant<Reg, Imm>> &params);

    void EmitJump(const BasicBlock *bb);

    bool EmitCallRuntimeCode(Inst *inst, EntrypointId id);

    void PrepareAndEmitCallVirtual(CallInst *call_inst);

    void IntfInlineCachePass(CallInst *call_inst, Reg method_reg, Reg tmp_reg, Reg obj_reg);

    void EmitCallVirtual(Reg method_reg);

    void PrepareCallVirtualAot(CallInst *call_inst, Reg method_reg);
    void PrepareCallVirtual(CallInst *call_inst, Reg method_reg);

    uint32_t GetVtableShift();

    void CalculateCardIndex(MemRef mem, ScopedTmpReg *tmp, ScopedTmpReg *tmp1);

    void EmitGetUnresolvedCalleeMethod(CallInst *call_inst);

    void EmitCreateCallCode(CallInst *call_inst);

    void EmitEpilogueForCreateCall(CallInst *call_inst);

    void CreateBuiltinIntrinsic(IntrinsicInst *inst);
    static constexpr int32_t NUM_OF_SRC_BUILTIN = 6;
    static constexpr uint8_t FIRST_OPERAND = 0;
    static constexpr uint8_t SECOND_OPERAND = 1;
    static constexpr uint8_t THIRD_OPERAND = 2;
    static constexpr uint8_t FOURTH_OPERAND = 3;
    static constexpr uint8_t FIFTH_OPERAND = 4;
    using SRCREGS = std::array<Reg, NUM_OF_SRC_BUILTIN>;
    // implementation is generated with compiler/optimizer/templates/intrinsics/intrinsics_codegen.inl.erb
    void FillBuiltin(IntrinsicInst *inst, SRCREGS src, Reg dst, RegMask *mask);
    static Reg AcquireNonLiveReg(RegMask *mask);

    void AddParamRegsInLiveMasks(RegMask *live_regs, VRegMask *live_vregs,
                                 const std::initializer_list<std::variant<Reg, Imm>> &params);

    void CreateStubCall(Inst *inst, RuntimeInterface::IntrinsicId intrinsicId, Reg dst,
                        const std::initializer_list<std::variant<Reg, Imm>> &params);

    ScopedTmpReg CalculatePreviousTLABAllocSize(Reg reg, LabelHolder::LabelId label);
    friend class IntrinsicCodegenTest;

    virtual void IntrinsicSlowPathEntry([[maybe_unused]] IntrinsicInst *inst)
    {
        GetEncoder()->SetFalseResult();
    }
    virtual void IntrinsicCallRuntimeSaveAll([[maybe_unused]] IntrinsicInst *inst)
    {
        GetEncoder()->SetFalseResult();
    }
    virtual void IntrinsicSaveRegisters([[maybe_unused]] IntrinsicInst *inst)
    {
        GetEncoder()->SetFalseResult();
    }
    virtual void IntrinsicRestoreRegisters([[maybe_unused]] IntrinsicInst *inst)
    {
        GetEncoder()->SetFalseResult();
    }
    virtual void IntrinsicTailCall([[maybe_unused]] IntrinsicInst *inst)
    {
        GetEncoder()->SetFalseResult();
    }

#include "codegen_language_extensions.h"
#include "intrinsics_codegen.inl.h"

private:
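    // EncodeImms moves each immediate from 'imms' into the next fixed parameter
    // register dictated by the calling convention, as an INT32 value. A minimal
    // usage sketch (hypothetical ids, for illustration only):
    //
    //   EncodeImms(std::array<uint32_t, 2U> {type_id, class_id});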
    template <typename T>
    void EncodeImms(const T &imms)
    {
        auto param_info = GetCallingConvention()->GetParameterInfo(0);
        auto imm_type = DataType::INT32;
        for (auto imm : imms) {
            auto location = param_info->GetNextLocation(imm_type);
            ASSERT(location.IsFixedRegister());
            auto dst_reg = ConvertRegister(location.GetValue(), imm_type);
            GetEncoder()->EncodeMov(dst_reg, Imm(imm));
        }
    }

private:
    ArenaAllocator *allocator_;
    ArenaAllocator *local_allocator_;
    // Register description
    RegistersDescription *regfile_;
    // Encoder implementation
    Encoder *enc_;
    // Target architecture calling convention model
    CallingConvention *callconv_;
    // Current execution model implementation
    // Visitor for instructions
    GraphVisitor *visitor_ {};

    CodeInfoBuilder *code_builder_ {nullptr};

    ArenaVector<SlowPathBase *> slow_paths_;
    ArenaUnorderedMap<RuntimeInterface::EntrypointId, SlowPathShared *> slow_paths_map_;

    const CFrameLayout frame_layout_;  // NOLINT(readability-identifier-naming)

    ArenaVector<OsrEntryStub *> osr_entries_;

    RuntimeInterface::MethodId method_id_ {INVALID_ID};

    size_t start_code_offset_ {0};

    ArenaVector<std::pair<int16_t, int16_t>> vreg_indices_;

    RuntimeInterface *runtime_ {nullptr};

    LabelHolder::LabelId label_entry_ {};
    LabelHolder::LabelId label_exit_ {};

    FrameInfo *frame_info_ {nullptr};

    const Target target_;

    /* Registers that have been allocated by regalloc */
    RegMask used_regs_ {0};
    RegMask used_vregs_ {0};

    /* Map of BasicBlock to live-out registers mask. It is needed in epilogue encoding to avoid overwriting of the
     * live-out registers */
    ArenaUnorderedMap<const BasicBlock *, RegMask> live_outs_;

    Disassembly disasm_;

    SpillFillsResolver spill_fills_resolver_;

    friend class EncodeVisitor;
    friend class BaselineCodegen;

    void CreateStubCall(RuntimeInterface::IntrinsicId intrinsicId, Reg dst,
                        const std::initializer_list<std::variant<Reg, Imm>> &params);
};  // Codegen
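// EncodeVisitor walks the graph's blocks in RPO (see GetBlocksToVisit) and dispatches
// each instruction to the matching static Visit* handler below; every handler lowers
// one IR opcode through the Codegen and Encoder interfaces. Opcodes without a handler
// fall into VisitDefault, which marks the whole encoding attempt as failed.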
class EncodeVisitor : public GraphVisitor {
    using EntrypointId = RuntimeInterface::EntrypointId;

public:
    explicit EncodeVisitor(Codegen *cg) : cg_(cg), arch_(cg->GetArch()) {}

    EncodeVisitor() = delete;

    const ArenaVector<BasicBlock *> &GetBlocksToVisit() const override
    {
        return cg_->GetGraph()->GetBlocksRPO();
    }
    Codegen *GetCodegen() const
    {
        return cg_;
    }
    Encoder *GetEncoder()
    {
        return cg_->GetEncoder();
    }
    Arch GetArch() const
    {
        return arch_;
    }
    CallingConvention *GetCallingConvention()
    {
        return cg_->GetCallingConvention();
    }

    RegistersDescription *GetRegfile()
    {
        return cg_->GetRegfile();
    }

    bool GetResult()
    {
        return success_ && cg_->GetEncoder()->GetResult();
    }

    // For each group of SpillFillData that represents spill or fill operations and shares the same source and
    // destination types, order by stack slot number in descending order.
    static void SortSpillFillData(ArenaVector<SpillFillData> *spill_fills);
    // Checks whether two spill-fill operations can be coalesced into a single operation over a pair of arguments.
    static bool CanCombineSpillFills(SpillFillData pred, SpillFillData succ, const CFrameLayout &fl,
                                     const Graph *graph);
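    // For example (an illustration, not a guarantee): two fills of the same type from
    // adjacent stack slots into two registers may be combined into one paired load on
    // targets that support paired memory accesses, such as aarch64 LDP/STP.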
protected:
    // UnaryOperation
    static void VisitMov(GraphVisitor *visitor, Inst *inst);
    static void VisitNeg(GraphVisitor *visitor, Inst *inst);
    static void VisitAbs(GraphVisitor *visitor, Inst *inst);
    static void VisitNot(GraphVisitor *visitor, Inst *inst);
    static void VisitSqrt(GraphVisitor *visitor, Inst *inst);

    // BinaryOperation
    static void VisitAdd(GraphVisitor *visitor, Inst *inst);
    static void VisitSub(GraphVisitor *visitor, Inst *inst);
    static void VisitMul(GraphVisitor *visitor, Inst *inst);
    static void VisitShl(GraphVisitor *visitor, Inst *inst);
    static void VisitAShr(GraphVisitor *visitor, Inst *inst);
    static void VisitAnd(GraphVisitor *visitor, Inst *inst);
    static void VisitOr(GraphVisitor *visitor, Inst *inst);
    static void VisitXor(GraphVisitor *visitor, Inst *inst);

    // Binary Overflow Operation
    static void VisitAddOverflow(GraphVisitor *v, Inst *inst);
    static void VisitAddOverflowCheck(GraphVisitor *v, Inst *inst);
    static void VisitSubOverflow(GraphVisitor *v, Inst *inst);
    static void VisitSubOverflowCheck(GraphVisitor *v, Inst *inst);

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define BinaryImmOperation(opc) static void Visit##opc##I(GraphVisitor *visitor, Inst *inst);

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define BINARRY_IMM_OPS(DEF) DEF(Add) DEF(Sub) DEF(Shl) DEF(AShr) DEF(And) DEF(Or) DEF(Xor)

    BINARRY_IMM_OPS(BinaryImmOperation)

#undef BINARRY_IMM_OPS
#undef BinaryImmOperation

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define BinarySignUnsignOperation(opc) static void Visit##opc(GraphVisitor *visitor, Inst *inst);

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define SIGN_UNSIGN_OPS(DEF) DEF(Div) DEF(Mod) DEF(Min) DEF(Max) DEF(Shr)

    SIGN_UNSIGN_OPS(BinarySignUnsignOperation)

#undef SIGN_UNSIGN_OPS
#undef BinarySignUnsignOperation

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define BinaryShiftedRegisterOperationDef(opc, ignored) static void Visit##opc##SR(GraphVisitor *visitor, Inst *inst);

    ENCODE_INST_WITH_SHIFTED_OPERAND(BinaryShiftedRegisterOperationDef)

#undef BinaryShiftedRegisterOperationDef
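    // The macro blocks above expand to one declaration per opcode: BINARRY_IMM_OPS
    // yields VisitAddI, VisitSubI, VisitShlI, VisitAShrI, VisitAndI, VisitOrI and
    // VisitXorI; SIGN_UNSIGN_OPS yields VisitDiv, VisitMod, VisitMin, VisitMax and
    // VisitShr; ENCODE_INST_WITH_SHIFTED_OPERAND yields the Visit*SR variants.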
    static void VisitShrI(GraphVisitor *visitor, Inst *inst);

    static void VisitCast(GraphVisitor *visitor, Inst *inst);

    static void VisitPhi([[maybe_unused]] GraphVisitor *visitor, [[maybe_unused]] Inst *inst);

    static void VisitConstant(GraphVisitor *visitor, Inst *inst);

    static void VisitNullPtr(GraphVisitor *visitor, Inst *inst);

    // Next visitors use calling convention
    static void VisitIndirectJump(GraphVisitor *visitor, Inst *inst);

    static void VisitIf(GraphVisitor *visitor, Inst *inst);

    static void VisitIfImm(GraphVisitor *visitor, Inst *inst);

    static void VisitCompare(GraphVisitor *visitor, Inst *inst);

    static void VisitCmp(GraphVisitor *visitor, Inst *inst);

    // All next visitors use execution model for implementation
    static void VisitReturnVoid(GraphVisitor *visitor, Inst * /* unused */);

    static void VisitReturn(GraphVisitor *visitor, Inst *inst);

    static void VisitReturnI(GraphVisitor *visitor, Inst *inst);

    static void VisitReturnInlined(GraphVisitor *visitor, Inst * /* unused */);

    static void VisitNewArray(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadConstArray(GraphVisitor *visitor, Inst *inst);

    static void VisitFillConstArray(GraphVisitor *visitor, Inst *inst);

    static void VisitParameter(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreArray(GraphVisitor *visitor, Inst *inst);

    static void VisitSpillFill(GraphVisitor *visitor, Inst *inst);

    static void VisitSaveState(GraphVisitor *visitor, Inst *inst);

    static void VisitSaveStateDeoptimize(GraphVisitor *visitor, Inst *inst);

    static void VisitSaveStateOsr(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadArray(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadCompressedStringChar(GraphVisitor *visitor, Inst *inst);

    static void VisitLenArray(GraphVisitor *visitor, Inst *inst);

    static void VisitNullCheck(GraphVisitor *visitor, Inst *inst);

    static void VisitBoundsCheck(GraphVisitor *visitor, Inst *inst);

    static void VisitZeroCheck(GraphVisitor *visitor, Inst *inst);

    static void VisitRefTypeCheck(GraphVisitor *visitor, Inst *inst);

    static void VisitNegativeCheck(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadString(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadObject(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedLoadObject(GraphVisitor *visitor, Inst *inst);

    static void VisitLoad(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreObject(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedStoreObject(GraphVisitor *visitor, Inst *inst);

    static void VisitStore(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedLoadStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedStoreStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitNewObject(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadClass(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadAndInitClass(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedLoadAndInitClass(GraphVisitor *visitor, Inst *inst);

    static void VisitInitClass(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedInitClass(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadType(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedLoadType(GraphVisitor *visitor, Inst *inst);

    static void VisitCheckCast(GraphVisitor *visitor, Inst *inst);

    static void VisitIsInstance(GraphVisitor *visitor, Inst *inst);

    static void VisitMonitor(GraphVisitor *visitor, Inst *inst);

    static void VisitIntrinsic(GraphVisitor *visitor, Inst *inst);

    static void VisitBuiltin(GraphVisitor *visitor, Inst *inst);

    static void VisitBoundsCheckI(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreArrayI(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadArrayI(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadCompressedStringCharI(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadI(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreI(GraphVisitor *visitor, Inst *inst);

    static void VisitMultiArray(GraphVisitor *visitor, Inst *inst);

    static void VisitCallStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedCallStatic(GraphVisitor *visitor, Inst *inst);

    static void VisitCallVirtual(GraphVisitor *visitor, Inst *inst);

    static void VisitUnresolvedCallVirtual(GraphVisitor *visitor, Inst *inst);

    static void VisitCallDynamic(GraphVisitor *visitor, Inst *inst);

    static void VisitSafePoint(GraphVisitor *visitor, Inst *inst);

    static void VisitSelect(GraphVisitor *visitor, Inst *inst);

    static void VisitSelectImm(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadArrayPair(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadArrayPairI(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadPairPart(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreArrayPair(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreArrayPairI(GraphVisitor *visitor, Inst *inst);

    static void VisitLoadExclusive(GraphVisitor *visitor, Inst *inst);

    static void VisitStoreExclusive(GraphVisitor *visitor, Inst *inst);

    static void VisitNOP(GraphVisitor *visitor, Inst *inst);

    static void VisitThrow(GraphVisitor *visitor, Inst *inst);

    static void VisitDeoptimizeIf(GraphVisitor *visitor, Inst *inst);

    static void VisitDeoptimizeCompare(GraphVisitor *visitor, Inst *inst);

    static void VisitDeoptimizeCompareImm(GraphVisitor *visitor, Inst *inst);

    static void VisitDeoptimize(GraphVisitor *visitor, Inst *inst);

    static void VisitIsMustDeoptimize(GraphVisitor *visitor, Inst *inst);

    static void VisitMAdd(GraphVisitor *visitor, Inst *inst);
    static void VisitMSub(GraphVisitor *visitor, Inst *inst);
    static void VisitMNeg(GraphVisitor *visitor, Inst *inst);
    static void VisitOrNot(GraphVisitor *visitor, Inst *inst);
    static void VisitAndNot(GraphVisitor *visitor, Inst *inst);
    static void VisitXorNot(GraphVisitor *visitor, Inst *inst);
    static void VisitNegSR(GraphVisitor *visitor, Inst *inst);

    static void VisitGetInstanceClass(GraphVisitor *visitor, Inst *inst);
    static void VisitGetManagedClassObject(GraphVisitor *visitor, Inst *inst);
    static void VisitClassImmediate(GraphVisitor *visitor, Inst *inst);
    static void VisitRegDef(GraphVisitor *visitor, Inst *inst);
    static void VisitLiveIn(GraphVisitor *visitor, Inst *inst);
    static void VisitLiveOut(GraphVisitor *visitor, Inst *inst);
    static void VisitCallIndirect(GraphVisitor *visitor, Inst *inst);
    static void VisitCall(GraphVisitor *visitor, Inst *inst);

    // Dynamic instructions
    static void VisitCompareAnyType(GraphVisitor *visitor, Inst *inst);
    static void VisitCastAnyTypeValue(GraphVisitor *visitor, Inst *inst);
    static void VisitCastValueToAnyType(GraphVisitor *visitor, Inst *inst);
    static void VisitAnyTypeCheck(GraphVisitor *visitor, Inst *inst);

    void VisitDefault([[maybe_unused]] Inst *inst) override
    {
#ifndef NDEBUG
        COMPILER_LOG(DEBUG, CODEGEN) << "Can't encode instruction " << GetOpcodeString(inst->GetOpcode())
                                     << " with type " << DataType::ToString(inst->GetType());
#endif
        success_ = false;
    }
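    // VisitDefault is the fallback for any opcode without a handler above: it clears
    // success_, which GetResult() combines with the encoder status, so the failure
    // surfaces instead of producing silently incomplete code.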
    // Helper functions
    static void FillUnresolvedClass(GraphVisitor *visitor, Inst *inst);
    static void FillObjectClass(GraphVisitor *visitor, Reg tmp_reg, LabelHolder::LabelId throw_label);
    static void FillOtherClass(GraphVisitor *visitor, Inst *inst, Reg tmp_reg, LabelHolder::LabelId throw_label);
    static void FillArrayObjectClass(GraphVisitor *visitor, Reg tmp_reg, LabelHolder::LabelId throw_label);
    static void FillArrayClass(GraphVisitor *visitor, Inst *inst, Reg tmp_reg, LabelHolder::LabelId throw_label);
    static void FillInterfaceClass(GraphVisitor *visitor, Inst *inst);

    static void FillLoadClassUnresolved(GraphVisitor *visitor, Inst *inst);

    static void FillCheckCast(GraphVisitor *visitor, Inst *inst, Reg src, LabelHolder::LabelId end_label,
                              compiler::ClassType klass_type);

    static void FillIsInstanceUnresolved(GraphVisitor *visitor, Inst *inst);

    static void FillIsInstanceCaseObject(GraphVisitor *visitor, Inst *inst, Reg tmp_reg);

    static void FillIsInstanceCaseOther(GraphVisitor *visitor, Inst *inst, Reg tmp_reg, LabelHolder::LabelId end_label);

    static void FillIsInstanceCaseArrayObject(GraphVisitor *visitor, Inst *inst, Reg tmp_reg,
                                              LabelHolder::LabelId end_label);

    static void FillIsInstanceCaseArrayClass(GraphVisitor *visitor, Inst *inst, Reg tmp_reg,
                                             LabelHolder::LabelId end_label);

    static void FillIsInstanceCaseInterface(GraphVisitor *visitor, Inst *inst);

    static void FillIsInstance(GraphVisitor *visitor, Inst *inst, Reg tmp_reg, LabelHolder::LabelId end_label);

#include "optimizer/ir/visitor.inc"

private:
    static void VisitDynamicMethodParameter(GraphVisitor *visitor, Inst *inst);
    static void HandleDynParamPassed(const SpillFillData &sf, EncodeVisitor *enc);
    static void HandleDynParamNotPassed(const SpillFillData &sf, EncodeVisitor *enc);
    static void CastToAny(GraphVisitor *visitor, Inst *inst);

private:
    Codegen *cg_;
    Arch arch_;
    bool success_ {true};
};  // EncodeVisitor

}  // namespace panda::compiler

#endif  // COMPILER_OPTIMIZER_CODEGEN_CODEGEN_H_