Home
last modified time | relevance | path

Searched refs:kRet0 (Results 1 – 8 of 8) sorted by relevance

/art/compiler/dex/quick/
gen_common.cc:409 RegStorage ref_reg = TargetReg(kRet0, kRef); in GenFilledNewArray()
516 m2l_->OpRegCopy(r_base_, m2l_->TargetReg(kRet0, kRef)); in Compile()
857 m2l_->OpRegCopy(rl_result_.reg, m2l_->TargetReg(kRet0, kRef)); in GenConstClass()
903 LoadRefDisp(TargetReg(kArg0, kRef), offset_of_string, TargetReg(kRet0, kRef), kNotVolatile); in GenConstString()
904 LIR* fromfast = OpCmpImmBranch(kCondEq, TargetReg(kRet0, kRef), 0, NULL); in GenConstString()
1102 m2l_->TargetReg(kRet0, kRef)); // Align usage with fast path in GenInstanceofCallingHelper()
1218 OpRegCopy(class_reg, TargetReg(kRet0, kRef)); // Align usage with fast path in GenCheckCast()
1249 m2l_->OpRegCopy(class_reg_, m2l_->TargetReg(kRet0, kRef)); // Align usage with fast path in GenCheckCast()
1806 int ret_reg = TargetReg(kRet0, kNotWide).GetReg(); in GenArithOpLong()
1839 ret_reg = TargetReg(kRet0, kNotWide).GetReg(); in GenArithOpLong()
[all …]
mir_to_lir.h:1188 DCHECK((kArg0 <= reg && reg < kArg7) || (kFArg0 <= reg && reg < kFArg7) || (kRet0 == reg)); in TargetReg()
1195 COMPILE_ASSERT(kRet1 == kRet0 + 1, kret_range_unexpected); in TargetReg()
/art/compiler/dex/
compiler_enums.h:63 kRet0, enumerator
/art/compiler/dex/quick/mips/
target_mips.cc:102 case kRet0: res_reg = rs_rMIPS_RET0; break; in TargetReg()
503 RegStorage reg_ret = RegStorage::MakeRegPair(TargetReg(kRet0), TargetReg(kRet1)); in GenAtomic64Load()
/art/compiler/dex/quick/arm/
target_arm.cc:110 case kRet0: res_reg = rs_r0; break; in TargetReg()
/art/compiler/dex/quick/x86/
codegen_x86.h:95 (kRet0 == symbolic_reg)); in TargetReg()
target_x86.cc:223 case kRet0: res_reg = rs_rX86_RET0; break; in TargetReg32()
/art/compiler/dex/quick/arm64/
target_arm64.cc:130 case kRet0: res_reg = rs_w0; break; in TargetReg()