/arkcompiler/ets_runtime/ecmascript/compiler/codegen/maple/maple_be/src/cg/aarch64/
aarch64_peep.cpp
- 47   if (insn.GetMachineOpcode() == MOP_xbl || insn.GetMachineOpcode() == MOP_tail_call_opt_xbl) {    in GetReadBarrierName()
- 141  MOperator thisMop = insn.GetMachineOpcode();    in DoSSAOptimize()
- 310  MOperator curMop = insn.GetMachineOpcode();    in CheckCondition()
- 324  MOperator prevCmpMop = prevCmpInsn->GetMachineOpcode();    in CheckCondition()
- 341  MOperator prevCsetMop1 = prevCsetInsn1->GetMachineOpcode();    in CheckCondition()
- 366  MOperator curMop = insn.GetMachineOpcode();    in Run()
- 370  MOperator prevCsetMop = prevCsetInsn1->GetMachineOpcode();    in Run()
- 425  MOperator curMop = insn.GetMachineOpcode();    in CheckCondition()
- 434  MOperator prevMop = prevInsn->GetMachineOpcode();    in CheckCondition()
- 459  MOperator useMop = useInsn->GetMachineOpcode();    in CheckCondition()
- [all …]
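
These peephole hits share one shape: a pattern's CheckCondition()/Run() first reads the machine opcode of the current instruction and of the neighbouring cmp/cset instructions, and bails out unless they carry the expected MOP_ values. Below is a minimal, self-contained sketch of that guard; Insn, MOperator, the MOP_ names and the previous-instruction link are simplified stand-ins, not the real Maple CG API.

```cpp
#include <cstdint>

// Simplified stand-ins for the Maple CG types that appear in the hits above.
using MOperator = uint32_t;
enum : MOperator { MOP_undef = 0, MOP_wcmpri, MOP_wcsetrc };

struct Insn {
    MOperator mop = MOP_undef;
    const Insn *prev = nullptr;                               // hypothetical previous-instruction link
    MOperator GetMachineOpcode() const { return mop; }
    const Insn *GetPreviousMachineInsn() const { return prev; }
};

// Shape of a peephole pattern's CheckCondition(): bail out unless the current
// instruction and the preceding one carry the expected opcodes.
bool CheckCondition(const Insn &insn) {
    MOperator curMop = insn.GetMachineOpcode();
    if (curMop != MOP_wcsetrc) {
        return false;
    }
    const Insn *prevInsn = insn.GetPreviousMachineInsn();
    if (prevInsn == nullptr) {
        return false;
    }
    MOperator prevMop = prevInsn->GetMachineOpcode();
    return prevMop == MOP_wcmpri;                             // only then is a rewrite attempted
}

int main() {
    Insn cmp;
    cmp.mop = MOP_wcmpri;
    Insn cset;
    cset.mop = MOP_wcsetrc;
    cset.prev = &cmp;
    return CheckCondition(cset) ? 0 : 1;
}
```

The opcode test is only the cheap first filter; the real patterns go on to check operands before committing a rewrite.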
|
aarch64_optimize_common.cpp
- 27   if (insn->GetMachineOpcode() == MOP_adrp_label) {    in ModifyJumpTarget()
- 39   if (insn->GetMachineOpcode() == MOP_adrp_label) {    in ModifyJumpTarget()
- 104  switch (insn.GetMachineOpcode()) {    in IsCompareInsn()
- 141  switch (insn.GetMachineOpcode()) {    in IsCompareAndBranchInsn()
- 154  switch (insn.GetMachineOpcode()) {    in IsTestAndSetCCInsn()
- 167  switch (insn.GetMachineOpcode()) {    in IsTestAndBranchInsn()
- 180  switch (insn.GetMachineOpcode()) {    in IsAddOrSubInsn()
- 197  return (insn.GetMachineOpcode() == MOP_xuncond);    in IsSimpleJumpInsn()
- 222  MOperator mOp = AArch64isa::FlipConditionOp(lastInsn->GetMachineOpcode());    in FlipIfBB()
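
Most hits in this file are classifiers (IsCompareInsn, IsCompareAndBranchInsn, IsTestAndSetCCInsn, IsAddOrSubInsn) that switch on the machine opcode and whitelist a fixed set of cases, while IsSimpleJumpInsn() reduces to a single equality test. A small sketch of the idiom, with stand-in opcode names rather than the real MOP_ tables:

```cpp
#include <cstdint>

using MOperator = uint32_t;
// Stand-in opcodes; the real lists come from the AArch64 machine description.
enum : MOperator { MOP_wcmprr = 1, MOP_xcmprr, MOP_waddrrr, MOP_xaddrrr, MOP_xuncond };

struct Insn {
    MOperator mop;
    MOperator GetMachineOpcode() const { return mop; }
};

// Classifier idiom: switch on the opcode and whitelist the matching cases.
bool IsCompareInsn(const Insn &insn) {
    switch (insn.GetMachineOpcode()) {
        case MOP_wcmprr:
        case MOP_xcmprr:
            return true;
        default:
            return false;
    }
}

// A single-opcode classifier collapses to one equality test.
bool IsSimpleJumpInsn(const Insn &insn) {
    return insn.GetMachineOpcode() == MOP_xuncond;
}

int main() {
    Insn cmp{MOP_xcmprr};
    return IsCompareInsn(cmp) && !IsSimpleJumpInsn(cmp) ? 0 : 1;
}
```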
|
aarch64_alignment.cpp
- 61   if (!insn->IsMachineInstruction() || insn->GetMachineOpcode() == MOP_pseudo_ret_int ||    in IsInSizeRange()
- 62   insn->GetMachineOpcode() == MOP_pseudo_ret_float) {    in IsInSizeRange()
- 70   if (!insn->IsMachineInstruction() || insn->GetMachineOpcode() == MOP_pseudo_ret_int ||    in IsInSizeRange()
- 71   insn->GetMachineOpcode() == MOP_pseudo_ret_float) {    in IsInSizeRange()
- 186  MOperator mOp = insn->GetMachineOpcode();    in UpdateInsnId()
- 191  if (insn->GetMachineOpcode() == MOP_adrp_ldr && CGOptions::IsLazyBinding() &&    in UpdateInsnId()
- 211  MOperator mOp = insn->GetMachineOpcode();    in MarkShortBranchSplit()
|
aarch64_global.cpp
- 155  MOperator defMop = insn.GetMachineOpcode();    in InsnDefOne()
- 176  MOperator defMop = insn.GetMachineOpcode();    in InsnDefZero()
- 200  MOperator defMop = insn.GetMachineOpcode();    in InsnDefOneOrZero()
- 258  if (useInsn->GetMachineOpcode() == MOP_asm) {    in ReplaceAllUsedOpndWithNewOpnd()
- 311  if ((insn.GetMachineOpcode() != MOP_xmovrr) && (insn.GetMachineOpcode() != MOP_wmovrr) &&    in CheckCondition()
- 312  (insn.GetMachineOpcode() != MOP_xmovrr_uxtw)) {    in CheckCondition()
- 317  if (firstOpnd.GetSize() != secondOpnd.GetSize() && insn.GetMachineOpcode() != MOP_xmovrr_uxtw) {    in CheckCondition()
- 342  …if ((useInsn->GetMachineOpcode() == MOP_xmovkri16) || (useInsn->GetMachineOpcode() == MOP_wmovkri1…    in CheckCondition()
- 346  if (useInsn->GetMachineOpcode() == MOP_asm) {    in CheckCondition()
- 369  if (useInsn->GetMachineOpcode() == MOP_asm) {    in Optimize()
- [all …]
|
aarch64_ebo.cpp
- 89   return ((insn.GetMachineOpcode() >= MOP_xvmovsr) && (insn.GetMachineOpcode() <= MOP_xvmovrd));    in IsFmov()
- 94   return ((insn.GetMachineOpcode() >= MOP_xaddrrr) && (insn.GetMachineOpcode() <= MOP_ssub));    in IsAdd()
- 121  MOperator mOp = insn.GetMachineOpcode();    in IsClinitCheck()
- 127  if (insn.GetMachineOpcode() == MOP_lazy_ldr_static) {    in IsDecoupleStaticOp()
- 212  MOperator mOp = insn.GetMachineOpcode();    in IsSameRedefine()
- 218  sameInfo->insn->GetMachineOpcode() != mOp) {    in IsSameRedefine()
- 316  DEBUG_ASSERT(insn->GetMachineOpcode() == MOP_asm, "insn should be a call insn.");    in DefineAsmRegisters()
- 375  if (insn.GetMachineOpcode() != MOP_xret) {    in DefineReturnUseRegister()
- 400  if (insn.GetMachineOpcode() == MOP_asm) {    in DefineCallUseSpecialRegister()
- 517  MOperator mOp = insn.GetMachineOpcode();    in SimplifyBothConst()
- [all …]
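
IsFmov() and IsAdd() test whether the opcode falls inside a contiguous range of the opcode enumeration (MOP_xvmovsr..MOP_xvmovrd, MOP_xaddrrr..MOP_ssub); this only works because related opcodes are declared back to back in the generated opcode list. A hedged sketch of the range-check idiom with stand-in enumerators:

```cpp
#include <cstdint>

using MOperator = uint32_t;
// Stand-in enumeration: the range checks rely on related opcodes being
// declared consecutively, as in the generated AArch64 opcode list.
enum : MOperator {
    MOP_xvmovsr = 1, MOP_xvmovrs, MOP_xvmovdr, MOP_xvmovrd,   // fmov family
    MOP_xaddrrr, MOP_waddrrr, MOP_xsubrrr, MOP_ssub           // add/sub family
};

struct Insn {
    MOperator mop;
    MOperator GetMachineOpcode() const { return mop; }
};

// True for any opcode in the contiguous fmov block.
bool IsFmov(const Insn &insn) {
    return insn.GetMachineOpcode() >= MOP_xvmovsr && insn.GetMachineOpcode() <= MOP_xvmovrd;
}

// True for any opcode in the contiguous add/sub block.
bool IsAdd(const Insn &insn) {
    return insn.GetMachineOpcode() >= MOP_xaddrrr && insn.GetMachineOpcode() <= MOP_ssub;
}

int main() {
    Insn add{MOP_waddrrr};
    return IsAdd(add) && !IsFmov(add) ? 0 : 1;
}
```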
|
aarch64_validbit_opt.cpp
- 22   MOperator curMop = insn.GetMachineOpcode();    in DoOpt()
- 55   MOperator mop = insn.GetMachineOpcode();    in SetValidBits()
- 221  MOperator mOp = insn.GetMachineOpcode();    in CheckCondition()
- 250  MOperator useMop = useInsn->GetMachineOpcode();    in CheckCondition()
- 289  MOperator mOp = insn.GetMachineOpcode();    in CheckCondition()
- 339  MOperator mOp = insn.GetMachineOpcode();    in Run()
- 381  MOperator useMop = useInsn->GetMachineOpcode();    in IsContinuousCmpCset()
- 395  MOperator ccUseMop = ccUseInsn->GetMachineOpcode();    in IsContinuousCmpCset()
- 411  MOperator defMop = defInsn.GetMachineOpcode();    in OpndDefByOneValidBit()
- 443  MOperator curMop = csetInsn.GetMachineOpcode();    in CheckCondition()
- [all …]
|
aarch64_prop.cpp
- 87   … if (curInsn->GetMachineOpcode() == MOP_wmovri32 || curInsn->GetMachineOpcode() == MOP_xmovri64) {    in DoOpt()
- 113  const InsnDesc *md = &AArch64CG::kMd[(useInsn->GetMachineOpcode())];    in ZeroRegProp()
- 115  …bool isSpecficCase = useInsn->GetMachineOpcode() == MOP_wbfirri5i5 || useInsn->GetMachineOpcode() …    in ZeroRegProp()
- 198  MOperator arithMop = arithInsn.GetMachineOpcode();    in GetFoldMopAndVal()
- 280  MOperator curMop = useInsn->GetMachineOpcode();    in MovConstReplace()
- 304  MOperator curMop = useInsn->GetMachineOpcode();    in ArithmeticConstReplace()
- 404  MOperator curMop = useInsn->GetMachineOpcode();    in ShiftConstReplace()
- 437  MOperator curMop = useDUInfo.GetInsn()->GetMachineOpcode();    in ConstProp()
- 484  MOperator curMop = useInsn->GetMachineOpcode();    in BitInsertReplace()
- 725  MOperator opCode = defInsn.GetMachineOpcode();    in SelectReplaceMem()
- [all …]
|
aarch64_isa.cpp
- 106  MOperator curMop = insn.GetMachineOpcode();    in GetJumpTargetIdx()
- 167  MOperator curMop = insn.GetMachineOpcode();    in IsSub()
- 185  MOperator curMop = insn.GetMachineOpcode();    in GetMopSub2Subs()
|
aarch64_obj_emitter.cpp
- 347  const InsnDesc &md = AArch64CG::kMd[insn.GetMachineOpcode()];    in GetBinaryCodeForInsn()
- 372  if (insn.GetMachineOpcode() == MOP_xuxtw64) {    in GetBinaryCodeForInsn()
- 539  …(insn.GetMachineOpcode() == MOP_xadrp) ? FixupKind(kAArch64PCRelAdrpImm21) : FixupKind(kAArch64PCR…    in GetAdrLabelOpndValue()
- 655  if (insn.GetMachineOpcode() == MOP_xmovrr) {    in GenMovReg()
- 659  DEBUG_ASSERT(insn.GetMachineOpcode() == MOP_wmovrr, "support MOP_wmovrr Currently!");    in GenMovReg()
- 669  if (insn.GetMachineOpcode() == MOP_xmovrr) {    in GenMovReg()
- 673  DEBUG_ASSERT(insn.GetMachineOpcode() == MOP_wmovrr, "support MOP_wmovrr Currently!");    in GenMovReg()
- 761  …opnd |= GetLo12LitrealOpndValue(insn.GetMachineOpcode(), insn.GetOperand(kInsnThirdOpnd), objFuncE…    in GenAddPCRelAddrInsn()
- 877  … if (insn.GetMachineOpcode() == MOP_wubfizrri5i5 || insn.GetMachineOpcode() == MOP_xubfizrri6i6 ||    in GenBitfieldInsn()
- 878  insn.GetMachineOpcode() == MOP_wbfirri5i5 || insn.GetMachineOpcode() == MOP_xbfirri6i6) {    in GenBitfieldInsn()
- [all …]
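
The emitter keys everything off a static descriptor table indexed by the machine opcode (AArch64CG::kMd[insn.GetMachineOpcode()]). The sketch below shows that table-lookup pattern with a made-up InsnDesc and illustrative base encodings; none of the names or values are the real Maple definitions.

```cpp
#include <array>
#include <cstdint>

using MOperator = uint32_t;
enum : MOperator { MOP_xmovrr = 0, MOP_xaddrrr, MOP_xldr, kMopLast };

// Made-up descriptor: the real InsnDesc also carries operand descriptors,
// instruction properties, latency and format information.
struct InsnDesc {
    const char *name;
    uint32_t baseEncoding;   // illustrative bit patterns, not authoritative encodings
};

// Opcode-indexed descriptor table, analogous in shape to AArch64CG::kMd.
static const std::array<InsnDesc, kMopLast> kMd = {{
    {"mov", 0xAA0003E0u},
    {"add", 0x8B000000u},
    {"ldr", 0xF9400000u},
}};

struct Insn {
    MOperator mop;
    MOperator GetMachineOpcode() const { return mop; }
};

// GetBinaryCodeForInsn() shape: the opcode selects the descriptor that drives
// encoding; operand fields would then be OR-ed into the base pattern.
uint32_t GetBinaryCodeForInsn(const Insn &insn) {
    const InsnDesc &md = kMd[insn.GetMachineOpcode()];
    return md.baseEncoding;
}

int main() {
    Insn ldr{MOP_xldr};
    return GetBinaryCodeForInsn(ldr) == 0xF9400000u ? 0 : 1;
}
```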
|
aarch64_offset_adjust.cpp
- 147  if (insn.GetMachineOpcode() == MOP_xsubrri12 || insn.GetMachineOpcode() == MOP_wsubrri12) {    in AdjustmentOffsetForImmOpnd()
- 152  insn.SetMOP(AArch64CG::kMd[A64ConstProp::GetReversalMOP(insn.GetMachineOpcode())]);    in AdjustmentOffsetForImmOpnd()
- 181  switch (insn.GetMachineOpcode()) {    in AdjustmentStackPointer()
- 211  insn.SetMOP(AArch64CG::kMd[A64ConstProp::GetReversalMOP(insn.GetMachineOpcode())]);    in AdjustmentStackPointer()
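
AdjustmentOffsetForImmOpnd() and AdjustmentStackPointer() flip an add into its sub counterpart (and vice versa) by installing the reversed opcode through SetMOP() when an adjusted immediate turns negative. A rough, self-contained sketch of that flip; GetReversalMOP here is a hypothetical stand-in for A64ConstProp::GetReversalMOP, and SetMOP takes a bare opcode instead of an InsnDesc.

```cpp
#include <cstdint>

using MOperator = uint32_t;
enum : MOperator { MOP_waddrri12 = 1, MOP_wsubrri12, MOP_xaddrri12, MOP_xsubrri12 };

// Hypothetical reversal helper standing in for A64ConstProp::GetReversalMOP.
MOperator GetReversalMOP(MOperator mop) {
    switch (mop) {
        case MOP_waddrri12: return MOP_wsubrri12;
        case MOP_wsubrri12: return MOP_waddrri12;
        case MOP_xaddrri12: return MOP_xsubrri12;
        case MOP_xsubrri12: return MOP_xaddrri12;
        default:            return mop;           // unknown opcode: leave unchanged
    }
}

struct Insn {
    MOperator mop;
    int64_t imm;
    MOperator GetMachineOpcode() const { return mop; }
    void SetMOP(MOperator newMop) { mop = newMop; }   // simplified: the real SetMOP takes an InsnDesc
};

// If applying the stack/frame delta drives the immediate negative, negate it
// and switch the instruction to its add/sub counterpart.
void AdjustImmOpnd(Insn &insn, int64_t delta) {
    insn.imm += delta;
    if (insn.imm < 0) {
        insn.imm = -insn.imm;
        insn.SetMOP(GetReversalMOP(insn.GetMachineOpcode()));
    }
}

int main() {
    Insn addsp{MOP_xaddrri12, 16};
    AdjustImmOpnd(addsp, -48);   // 16 - 48 < 0: becomes a sub with immediate 32
    return (addsp.GetMachineOpcode() == MOP_xsubrri12 && addsp.imm == 32) ? 0 : 1;
}
```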
|
aarch64_data_dep_base.cpp
- 104  if ((stackDef->IsCall() && stackDef->GetMachineOpcode() != MOP_tls_desc_call) ||    in BuildDepsForMemUseCommon()
- 208  if (lastCallInsn != nullptr && lastCallInsn->GetMachineOpcode() != MOP_tls_desc_call) {    in BuildDepsDefMem()
- 335  MOperator mOp = insn.GetMachineOpcode();    in BuildOpndDependency()
- 381  if (insn.GetMachineOpcode() == MOP_tls_desc_call) {    in BuildSpecialCallDeps()
- 392  MOperator mOp = insn.GetMachineOpcode();    in BuildSpecialInsnDependency()
- 399  } else if (insn.IsClinit() || IsLazyLoad(insn.GetMachineOpcode()) ||    in BuildSpecialInsnDependency()
- 400  insn.GetMachineOpcode() == MOP_arrayclass_cache_ldr) {    in BuildSpecialInsnDependency()
- 403  if (insn.GetMachineOpcode() != MOP_adrp_ldr) {    in BuildSpecialInsnDependency()
- 537  MOperator mOp = depNode.GetInsn()->GetMachineOpcode();    in DumpNodeStyleInDot()
|
aarch64_reaching.cpp
- 316  if (insn->GetMachineOpcode() == MOP_asm) {    in FindRegDefInBB()
- 368  if (insn->GetMachineOpcode() == MOP_asm) {    in FindMemDefBetweenInsn()
- 384  if (!(insn->IsStore() || AArch64isa::IsPseudoInstruction(insn->GetMachineOpcode()))) {    in FindMemDefBetweenInsn()
- 408  … (insn->IsStorePair() && offset == memOffset + GetEachMemSizeOfPair(insn->GetMachineOpcode()))) {    in FindMemDefBetweenInsn()
- 431  if (insn->GetMachineOpcode() == MOP_asm) {    in FindMemDefInBB()
- 443  if (!(insn->IsStore() || AArch64isa::IsPseudoInstruction(insn->GetMachineOpcode()))) {    in FindMemDefInBB()
- 465  … if (insn->IsStorePair() && offset == memOffset + GetEachMemSizeOfPair(insn->GetMachineOpcode())) {    in FindMemDefInBB()
- 738  if (insn->GetMachineOpcode() == MOP_asm) {    in FindRegUsingBetweenInsn()
- 806  if (insn->GetMachineOpcode() == MOP_asm) {    in FindRegUseBetweenInsn()
- 898  if (insn->GetMachineOpcode() == MOP_asm) {    in FindMemUseBetweenInsn()
- [all …]
|
aarch64_strldr.cpp
- 283  MOperator mOp = insn.GetMachineOpcode();    in CheckNewAmount()
- 318  !a64CgFunc.IsOperandImmValid(insn.GetMachineOpcode(), newMemOpnd, opndIdx)) {    in CheckNewMemOffset()
- 401  if (insn->GetMachineOpcode() == MOP_qstr) {    in CanDoMemProp()
- 475  MOperator mOp = insn->GetMachineOpcode();    in DoStoreLoadOpt()
- 586  …bool subMode = defInsn.GetMachineOpcode() == MOP_wsubrri12 || defInsn.GetMachineOpcode() == MOP_xs…    in GetOffsetForNewIndex()
- 587  …bool addMode = defInsn.GetMachineOpcode() == MOP_waddrri12 || defInsn.GetMachineOpcode() == MOP_xa…    in GetOffsetForNewIndex()
|
aarch64_utils.cpp
- 25   MOperator loadMop = loadIns.GetMachineOpcode();    in GetOrCreateMemOperandForNewMOP()
|
aarch64_fixshortbranch.cpp
- 61   if (insn->GetMachineOpcode() == MOP_adrp_ldr && CGOptions::IsLazyBinding() &&    in SetInsnId()
- 131  MOperator thisMop = insn->GetMachineOpcode();    in FixShortBranches()
- 330  … if (insn->GetMachineOpcode() == MOP_xldli && insn->GetOperand(kInsnSecondOpnd).IsLabelOpnd()) {    in FixLdr()
|
aarch64_dependence.cpp
- 236  …DEBUG_ASSERT(firstNode.GetInsn()->GetMachineOpcode() == MOP_adrp_ldr, "first insn should be adrpld…    in CombineClinit()
- 237  …DEBUG_ASSERT(secondNode.GetInsn()->GetMachineOpcode() == MOP_clinit_tail, "second insn should be c…    in CombineClinit()
- 259  MOperator thisMop = firstNode.GetInsn()->GetMachineOpcode();    in CombineMemoryAccessPair()
- 381  switch (insn.GetMachineOpcode()) {    in GetNextMemOperand()
- 810  MOperator mOp = insn.GetMachineOpcode();    in BuildOpndDependency()
- 868  MOperator mOp = insn.GetMachineOpcode();    in BuildSpecialInsnDependency()
- 896  } else if (insn.IsClinit() || IsLazyLoad(insn.GetMachineOpcode()) ||    in BuildSpecialInsnDependency()
- 897  insn.GetMachineOpcode() == MOP_arrayclass_cache_ldr) {    in BuildSpecialInsnDependency()
- 900  if (insn.GetMachineOpcode() != MOP_adrp_ldr) {    in BuildSpecialInsnDependency()
|
aarch64_ssa.cpp
- 115  if (insn.GetMachineOpcode() == MOP_asm) {    in CheckAsmDUbinding()
- 180  MOperator mOp = insn.GetMachineOpcode();    in DumpInsnInSSAForm()
- 238  bool isAsm = insn->GetMachineOpcode() == MOP_asm;    in Visit()
- 307  if (insn->GetMachineOpcode() == MOP_asm) {    in Visit()
|
aarch64_reg_coalesce.cpp
- 101  … if (insn.GetMachineOpcode() == MOP_asm && (i == kAsmOutputListOpnd || i == kAsmClobberListOpnd)) {    in ComputeLiveIntervalsForEachDefOperand()
- 130  if (insn.GetMachineOpcode() == MOP_asm && i == kAsmInputListOpnd) {    in ComputeLiveIntervalsForEachUseOperand()
- 220  MOperator mOp = insn.GetMachineOpcode();    in IsRegistersCopy()
|
/arkcompiler/ets_runtime/ecmascript/compiler/codegen/maple/maple_be/src/cg/x86_64/
x64_standardize.cpp
- 39   X64MOP_t directlyMappingMop = GetMopFromAbstraceIRMop(insn.GetMachineOpcode());    in StdzMov()
- 51   X64MOP_t directlyMappingMop = GetMopFromAbstraceIRMop(insn.GetMachineOpcode());    in StdzBasicOp()
- 61   MOperator mOp = insn.GetMachineOpcode();    in StdzUnaryOp()
- 66   X64MOP_t directlyMappingMop = GetMopFromAbstraceIRMop(insn.GetMachineOpcode());    in StdzUnaryOp()
- 79   switch (insn.GetMachineOpcode()) {    in StdzCvtOp()
- 94   MOperator directlyMappingMop = GetMopFromAbstraceIRMop(insn.GetMachineOpcode());    in StdzCvtOp()
- 125  MOperator mOp = insn.GetMachineOpcode();    in StdzFloatingNeg()
- 181  X64MOP_t directlyMappingMop = GetMopFromAbstraceIRMop(insn.GetMachineOpcode());    in StdzShiftOp()
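
The x64 standardization pass mostly translates an abstract (machine-independent) opcode straight to a concrete x86-64 opcode via GetMopFromAbstraceIRMop(), and only switches on the opcode where extra logic is needed (StdzCvtOp, StdzFloatingNeg). A small sketch of the direct-mapping step, with a hypothetical lookup table and stand-in opcode names in place of the generated ones:

```cpp
#include <cstdint>
#include <unordered_map>

using MOperator = uint32_t;
using X64MOP_t = uint32_t;

// Stand-in abstract and x86-64 opcode names; the real ones are generated.
enum : MOperator { abstract_mov_64 = 1, abstract_add_64 = 2 };
enum : X64MOP_t { MOP_x64_begin = 0, MOP_movq_r_r = 100, MOP_addq_r_r = 101 };

// Hypothetical mapping table standing in for the generated GetMopFromAbstraceIRMop().
X64MOP_t GetMopFromAbstraceIRMop(MOperator abstractMop) {
    static const std::unordered_map<MOperator, X64MOP_t> table = {
        {abstract_mov_64, MOP_movq_r_r},
        {abstract_add_64, MOP_addq_r_r},
    };
    auto it = table.find(abstractMop);
    return it != table.end() ? it->second : MOP_x64_begin;
}

struct Insn {
    MOperator mop;
    MOperator GetMachineOpcode() const { return mop; }
    void SetMOP(X64MOP_t newMop) { mop = newMop; }   // simplified: the real SetMOP takes an InsnDesc
};

// StdzMov()/StdzBasicOp() shape: look up the concrete opcode and install it.
void StdzMov(Insn &insn) {
    X64MOP_t directlyMappingMop = GetMopFromAbstraceIRMop(insn.GetMachineOpcode());
    insn.SetMOP(directlyMappingMop);
}

int main() {
    Insn mov{abstract_mov_64};
    StdzMov(mov);
    return mov.GetMachineOpcode() == MOP_movq_r_r ? 0 : 1;
}
```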
|
x64_optimize_common.cpp
- 83   switch (insn.GetMachineOpcode()) {    in IsCompareInsn()
- 113  return (insn.GetMachineOpcode() == x64::MOP_jmpq_l);    in IsSimpleJumpInsn()
- 133  switch (insn.GetMachineOpcode()) {    in IsAddOrSubInsn()
|
x64_local_opt.cpp
- 31   MOperator mOp = insn.GetMachineOpcode();    in CheckCondition()
- 82   if (insn.GetMachineOpcode() == MOP_pseudo_ret_int) {    in propagateOperand()
|
x64_peep.cpp
- 67   MOperator thisMop = insn.GetMachineOpcode();    in DoNormalOptimize()
|
/arkcompiler/ets_runtime/ecmascript/compiler/codegen/maple/maple_be/src/cg/
dbg.cpp
- 41   MOperator mOp = GetMachineOpcode();    in Dump()
- 54   DbgDescr &dbgDescr = dbgDescrTable[GetMachineOpcode()];    in CheckMD()
|
cg_occur.cpp
- 36   uint32 hIdx = (static_cast<uint32>(insn.GetMachineOpcode())) << k3ByteSize;    in ComputeStmtWorkCandHashIndex()
|
cfi.cpp
- 44   MOperator mOp = GetMachineOpcode();    in Dump()
- 57   CfiDescr &cfiDescr = cfiDescrTable[GetMachineOpcode()];    in CheckMD()
|