Cross-reference results for the OpSize enumerator k64 across the ART Quick compiler back ends.

/art/compiler/dex/quick/
mir_to_lir.cc
      94  LoadBaseDisp(TargetPtrReg(kSp), offset, new_reg, wide ? k64 : k32, kNotVolatile);  in LoadArg()
     122  LoadBaseDisp(TargetPtrReg(kSp), offset, new_regs, k64, kNotVolatile);  in LoadArg()
     195  LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, k64, kNotVolatile);  in LoadArgDirect()
     213  LoadBaseDisp(TargetPtrReg(kSp), offset, rl_dest.reg, k64, kNotVolatile);  in LoadArgDirect()
     670  GenArrayGet(opt_flags, k64, rl_src[0], rl_src[1], rl_dest, 3);  in CompileDalvikInstruction()
     691  GenArrayPut(opt_flags, k64, rl_src[1], rl_src[2], rl_src[0], 3, false);  in CompileDalvikInstruction()
     727  GenIGet(mir, opt_flags, k64, rl_dest, rl_src[0], true, false);  in CompileDalvikInstruction()
     748  GenIPut(mir, opt_flags, k64, rl_src[0], rl_src[1], true, false);  in CompileDalvikInstruction()

gen_loadstore.cc
     129  LoadBaseDisp(TargetPtrReg(kSp), SRegOffset(rl_src.s_reg_low), r_dest, k64, kNotVolatile);  in LoadValueDirectWide()
     308  StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(rl_dest.s_reg_low), rl_dest.reg, k64, kNotVolatile);  in StoreValueWide()
     372  StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(rl_dest.s_reg_low), rl_dest.reg, k64, kNotVolatile);  in StoreFinalValueWide()

gen_invoke.cc
     841  StoreBaseDisp(TargetPtrReg(kSp), outs_offset, arg_reg, k64, kNotVolatile);  in GenDalvikArgsNoRange()
     925  StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(loc.s_reg_low), loc.reg, k64, kNotVolatile);  in GenDalvikArgsRange()
    1333  RegLocation rl_i = (size == k64) ? LoadValueWide(rl_src_i, kCoreReg) : LoadValue(rl_src_i, kCoreReg);  in GenInlinedReverseBytes()
    1334  RegLocation rl_dest = (size == k64) ? InlineTargetWide(info) : InlineTarget(info);  // result reg  in GenInlinedReverseBytes()
    1336  if (size == k64) {  in GenInlinedReverseBytes()
    1619  LoadBaseIndexed(rl_object.reg, rl_offset.reg, rl_result.reg, 0, k64);  in GenInlinedUnsafeGet()
    1623  LoadBaseDisp(rl_temp_offset, 0, rl_result.reg, k64, kNotVolatile);  in GenInlinedUnsafeGet()
    1667  StoreBaseIndexed(rl_object.reg, rl_offset.reg, rl_value.reg, 0, k64);  in GenInlinedUnsafePut()
    1671  StoreBaseDisp(rl_temp_offset, 0, rl_value.reg, k64, kNotVolatile);  in GenInlinedUnsafePut()

dex_file_method_inliner.cc
     298  INTRINSIC(JavaLangLong, ReverseBytes, J_J, kIntrinsicReverseBytes, k64),
     301  INTRINSIC(JavaLangLong, Reverse, J_J, kIntrinsicReverseBits, k64),
     355  INTRINSIC(LibcoreIoMemory, PeekLongNative, J_J, kIntrinsicPeek, k64),
     359  INTRINSIC(LibcoreIoMemory, PokeLongNative, JJ_V, kIntrinsicPoke, k64),

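The four rows above come from the inliner's intrinsics table: the trailing k64 is a payload telling one width-generic handler (reverse, peek, poke) to operate on 64-bit values. Below is a toy model of such a table, a sketch only; every type and field name in it is a stand-in, not ART's real INTRINSIC() macro expansion.

    // Toy model of the rows above. All names are stand-ins; ART's real
    // table is built by the INTRINSIC() macro in dex_file_method_inliner.cc.
    enum OpSize { k32, k64 };
    enum IntrinsicKind {
      kIntrinsicReverseBytes,
      kIntrinsicReverseBits,
      kIntrinsicPeek,
      kIntrinsicPoke,
    };

    struct IntrinsicRow {
      const char* klass;   // declaring class
      const char* method;  // method name
      const char* shorty;  // "J_J" = takes a long, returns a long
      IntrinsicKind kind;  // which generic handler serves this method
      OpSize size;         // payload: operand width for that handler
    };

    // The size payload is what lets the long variants of reverse/peek/poke
    // reuse the same handler as their narrower siblings.
    static const IntrinsicRow kExampleRows[] = {
        {"java.lang.Long", "reverseBytes", "J_J", kIntrinsicReverseBytes, k64},
        {"libcore.io.Memory", "peekLongNative", "J_J", kIntrinsicPeek, k64},
    };
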
ralloc_util.cc
     763  StoreBaseDisp(TargetPtrReg(kSp), VRegOffset(v_reg), reg, k64, kNotVolatile);  in FlushRegWide()
     771  StoreBaseDisp(TargetPtrReg(kSp), VRegOffset(v_reg), reg, k64, kNotVolatile);  in FlushRegWide()

mir_to_lir.h
    1502  return wide ? k64 : ref ? kReference : k32;  in LoadStoreOpSize()

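The single hit in mir_to_lir.h is the whole size-selection rule for generated loads and stores. A compilable sketch of that helper follows; the enum is reduced to stand-ins and the parameter names are assumed from the expression itself.

    #include <cstdio>

    // Stand-in for ART's OpSize; see the compiler_enums.h entry at the
    // end of this listing for the definition site.
    enum OpSize { k32, k64, kReference };

    // Reconstruction of the expression at mir_to_lir.h:1502: wide
    // (64-bit) values map to k64, object references to kReference, and
    // everything else defaults to k32.
    OpSize LoadStoreOpSize(bool wide, bool ref) {
      return wide ? k64 : ref ? kReference : k32;
    }

    int main() {
      std::printf("%d %d %d\n",
                  LoadStoreOpSize(true, false),    // 1 (k64)
                  LoadStoreOpSize(false, true),    // 2 (kReference)
                  LoadStoreOpSize(false, false));  // 0 (k32)
      return 0;
    }
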
/art/compiler/dex/quick/arm64/
target_arm64.cc
     762  LoadBaseDisp(rs_xSELF, GetThreadOffset<8>(trampoline).Int32Value(), rs_xLR, k64, kNotVolatile);  in LoadHelper()
     878  *op_size = k64;  in GetArgPhysicalReg()
     955  StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(start_vreg + i), reg, t_loc->wide ? k64 : k32,  in FlushIns()
     966  t_loc->wide ? k64 : k32, kNotVolatile);  in FlushIns()
    1062  StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(loc.s_reg_low), loc.reg, k64, kNotVolatile);  in GenDalvikArgsRange()
    1131  StoreBaseDisp(TargetPtrReg(kSp), out_offset, rl_arg.reg, k64, kNotVolatile);  in GenDalvikArgsRange()
    1134  StoreBaseDisp(TargetPtrReg(kSp), out_offset, regWide, k64, kNotVolatile);  in GenDalvikArgsRange()

int_arm64.cc
     675  RegLocation rl_dest = (size == k64) ? InlineTargetWide(info) : InlineTarget(info);  in GenInlinedPeek()
     680  if (size == k64) {  in GenInlinedPeek()
     695  if (size == k64) {  in GenInlinedPoke()
     878  LoadBaseIndexed(rs_src, rs_length, rs_tmp, 0, k64);  in GenInlinedArrayCopyCharArray()
     879  StoreBaseIndexed(rs_dst, rs_length, rs_tmp, 0, k64);  in GenInlinedArrayCopyCharArray()
    1205  if (size == k64 || size == kDouble) {  in GenArrayPut()
    1689  ArmOpcode wide = (size == k64) ? WIDE(0) : UNWIDE(0);  in GenInlinedReverseBits()
    1691  RegLocation rl_dest = (size == k64) ? InlineTargetWide(info) : InlineTarget(info);  // result reg  in GenInlinedReverseBits()
    1693  RegLocation rl_i = (size == k64) ? LoadValueWide(rl_src_i, kCoreReg) : LoadValue(rl_src_i, kCoreReg);  in GenInlinedReverseBits()
    1695  (size == k64) ? StoreValueWide(rl_dest, rl_result) : StoreValue(rl_dest, rl_result);  in GenInlinedReverseBits()

utility_arm64.cc
    1041  DCHECK(size == k64 || size == kDouble);  in LoadBaseIndexed()
    1059  case k64:  in LoadBaseIndexed()
    1129  DCHECK(size == k64 || size == kDouble);  in StoreBaseIndexed()
    1147  case k64:  in StoreBaseIndexed()
    1207  case k64:  in LoadBaseDispBody()
    1304  case k64:  in StoreBaseDispBody()

/art/compiler/dex/quick/mips/
utility_mips.cc
     473  case k64:  in LoadBaseDispBody()
     554  if (UNLIKELY(is_volatile == kVolatile && (size == k64 || size == kDouble))) {  in LoadBaseDisp()
     584  case k64:  in StoreBaseDispBody()
     663  if (UNLIKELY(is_volatile == kVolatile && (size == k64 || size == kDouble))) {  in StoreBaseDisp()

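The two UNLIKELY checks above (lines 554 and 663) guard volatile 64-bit accesses: on 32-bit MIPS a plain 64-bit load or store is not atomic, and the Java memory model requires volatile long/double accesses to be atomic, so these cases are routed to a special path. A minimal sketch of just that predicate, with mocked-up enums; the actual fallback code is not visible in this listing.

    // Mock enums standing in for ART's types; only the predicate logic
    // mirrors the matches above.
    enum OpSize { k32, k64, kDouble };
    enum VolatileKind { kNotVolatile, kVolatile };

    // A volatile long/double cannot be accessed as one plain 32-bit-wide
    // memory operation, so the generator must special-case it.
    inline bool NeedsAtomic64BitPath(VolatileKind is_volatile, OpSize size) {
      return is_volatile == kVolatile && (size == k64 || size == kDouble);
    }
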
int_mips.cc
     496  if (size == k64 || size == kDouble) {  in GenArrayGet()
     516  if ((size == k64) || (size == kDouble)) {  in GenArrayGet()
     560  if (size == k64 || size == kDouble) {  in GenArrayPut()
     596  if ((size == k64) || (size == kDouble)) {  in GenArrayPut()

target_mips.cc
     565  if (size == k64 || size == kDouble) {  in RegClassForFieldLoadStore()

/art/compiler/dex/quick/arm/
utility_arm.cc
     707  DCHECK((size == k64) || (size == kDouble));  in LoadBaseIndexed()
     773  DCHECK((size == k64) || (size == kDouble));  in StoreBaseIndexed()
     866  case k64:  in LoadBaseDispBody()
     973  (size == k64 || size == kDouble) &&  in LoadBaseDisp()
    1008  case k64:  in StoreBaseDispBody()
    1097  (size == k64 || size == kDouble) &&  in StoreBaseDisp()

int_arm.cc
     749  if (size == k64) {  in GenInlinedPeek()
     773  if (size == k64) {  in GenInlinedPoke()
    1379  if (size == k64 || size == kDouble) {  in GenArrayPut()

target_arm.cc
     545  if (size == k64 || size == kDouble) {  in RegClassForFieldLoadStore()

/art/compiler/dex/quick/x86/
fp_x86.cc
     148  StoreBaseDisp(rs_rX86_SP, src_v_reg_offset, rl_src.reg, k64, kNotVolatile);  in GenLongToFP()
     182  LoadBaseDisp(rs_rX86_SP, dest_v_reg_offset, rl_result.reg, k64, kNotVolatile);  in GenLongToFP()
     367  StoreBaseDisp(rs_rX86_SP, src1_v_reg_offset, rl_src1.reg, is_double ? k64 : k32,  in GenRemFP()
     378  StoreBaseDisp(rs_rX86_SP, src2_v_reg_offset, rl_src2.reg, is_double ? k64 : k32,  in GenRemFP()
     439  LoadBaseDisp(rs_rX86_SP, dest_v_reg_offset, rl_result.reg, k64, kNotVolatile);  in GenRemFP()

target_x86.cc
     697  OpSize size = cu_->target64 ? k64 : k32;  in SpillCoreRegs()
     714  OpSize size = cu_->target64 ? k64 : k32;  in UnSpillCoreRegs()
     733  k64, kNotVolatile);  in SpillFPRegs()
     747  k64, kNotVolatile);  in UnSpillFPRegs()
     770  if (size == k64 || size == kDouble) {  in RegClassForFieldLoadStore()
    2028  case k64:  in GenShiftLeftVector()
    2081  case k64:  in GenUnsignedShiftRightVector()
    2290  case k64:  in GenSetVector()
    2521  StoreBaseDisp(rs_rX86_SP, SRegOffset(start_vreg + i), reg, t_loc->wide ? k64 : k32,  in FlushIns()
    2532  t_loc->wide ? k64 : k32, kNotVolatile);  in FlushIns()
    [all …]

utility_x86.cc
     640  bool is64bit = ((size == k64) || (size == kDouble));  in LoadBaseIndexedDisp()
     643  case k64:  in LoadBaseIndexedDisp()
     787  bool is64bit = (size == k64) || (size == kDouble);  in StoreBaseIndexedDisp()
     790  case k64:  in StoreBaseIndexedDisp()

call_x86.cc
     295  cu_->target64 ? k64 : k32, kNotVolatile);  in GenEntrySequence()

int_x86.cc
     858  RegLocation rl_dest = size == k64 ? InlineTargetWide(info) : InlineTarget(info);  in GenInlinedPeek()
     862  if (size == k64) {  in GenInlinedPeek()
     882  if (size == k64) {  in GenInlinedPoke()
    2067  if (size == k64 || size == kDouble) {  in GenArrayGet()
    2097  if ((size == k64) || (size == kDouble)) {  in GenArrayGet()
    2114  if (size == k64 || size == kDouble) {  in GenArrayPut()
    2143  if ((size == k64) || (size == kDouble)) {  in GenArrayPut()

/art/compiler/dex/
compiler_enums.h
     265  k64,  (enumerator)

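The entry above is the definition site: k64 is an enumerator of the Quick back end's OpSize enum in compiler_enums.h. As a reference point, here is an approximate sketch restricted to the enumerators that actually appear in this listing; the ordering and the omitted enumerators are assumptions, not the exact definition.

    // Approximate sketch of OpSize (compiler_enums.h:265), limited to
    // the enumerators visible in the matches above; the real enum also
    // carries sub-word sizes (halfword and byte variants).
    enum OpSize {
      k32,         // 32-bit core value
      k64,         // 64-bit value: longs, and wide loads/stores generally
      kReference,  // object reference (pointer-sized on the target)
      kDouble,     // 64-bit floating-point value
      // ... sub-word sizes elided ...
    };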