| /art/compiler/utils/ |
| D | assembler_thumb_test.cc |
|     336  __ StoreToOffset(kStoreWord, R2, R4, 12); in TEST_F() local
|     337  __ StoreToOffset(kStoreWord, R2, R4, 0xfff); in TEST_F() local
|     338  __ StoreToOffset(kStoreWord, R2, R4, 0x1000); in TEST_F() local
|     339  __ StoreToOffset(kStoreWord, R2, R4, 0x1000a4); in TEST_F() local
|     340  __ StoreToOffset(kStoreWord, R2, R4, 0x101000); in TEST_F() local
|     341  __ StoreToOffset(kStoreWord, R4, R4, 0x101000); in TEST_F() local
|     342  __ StoreToOffset(kStoreHalfword, R2, R4, 12); in TEST_F() local
|     343  __ StoreToOffset(kStoreHalfword, R2, R4, 0xfff); in TEST_F() local
|     344  __ StoreToOffset(kStoreHalfword, R2, R4, 0x1000); in TEST_F() local
|     345  __ StoreToOffset(kStoreHalfword, R2, R4, 0x1000a4); in TEST_F() local
|     [all …]
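A note on the offsets this Thumb test probes (12, 0xfff, 0x1000, 0x1000a4, 0x101000): a minimal sketch, assuming the test is exercising the unsigned 12-bit immediate of Thumb-2 STR/STRH, so 0xfff is the largest directly encodable displacement and 0x1000 is the first one that forces the assembler through a scratch register. The helper below is hypothetical and only classifies offsets; it is not the ART encoder.

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical check: Thumb-2 STR/STRH with an immediate offset encode an
// unsigned 12-bit displacement (0..0xFFF).
static bool FitsThumb2Imm12(uint32_t offset) {
  return offset <= 0xFFFu;
}

int main() {
  // Offsets taken from the test hits above.
  const uint32_t offsets[] = {12, 0xfff, 0x1000, 0x1000a4, 0x101000};
  for (uint32_t off : offsets) {
    std::printf("0x%06x -> %s\n", static_cast<unsigned>(off),
                FitsThumb2Imm12(off) ? "single store" : "needs a scratch register");
  }
  return 0;
}
```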
|
| /art/compiler/utils/mips/ |
| D | assembler_mips_test.cc |
|     1582  TEST_F(AssemblerMIPSTest, StoreToOffset) { in TEST_F() argument
|     1583  __ StoreToOffset(mips::kStoreByte, mips::A3, mips::A1, -0x8000); in TEST_F() local
|     1584  __ StoreToOffset(mips::kStoreByte, mips::A3, mips::A1, +0); in TEST_F() local
|     1585  __ StoreToOffset(mips::kStoreByte, mips::A3, mips::A1, +0x7FF8); in TEST_F() local
|     1586  __ StoreToOffset(mips::kStoreByte, mips::A3, mips::A1, +0x7FFB); in TEST_F() local
|     1587  __ StoreToOffset(mips::kStoreByte, mips::A3, mips::A1, +0x7FFC); in TEST_F() local
|     1588  __ StoreToOffset(mips::kStoreByte, mips::A3, mips::A1, +0x7FFF); in TEST_F() local
|     1589  __ StoreToOffset(mips::kStoreByte, mips::A3, mips::A1, -0xFFF0); in TEST_F() local
|     1590  __ StoreToOffset(mips::kStoreByte, mips::A3, mips::A1, -0x8008); in TEST_F() local
|     1591  __ StoreToOffset(mips::kStoreByte, mips::A3, mips::A1, -0x8001); in TEST_F() local
|     [all …]
|
| D | assembler_mips.cc | 4753 void MipsAssembler::StoreToOffset(StoreOperandType type, in StoreToOffset() function in art::mips::MipsAssembler
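The boundary offsets in the MIPS test above (+0x7FFF, -0x8000, -0x8001, -0x8008, …) line up with the signed 16-bit displacement of MIPS store instructions. Below is a minimal sketch of the splitting a StoreToOffset-style helper has to do when the offset is out of range, assuming the usual lui/addu-through-a-scratch-register approach; it prints an instruction sequence for illustration and is not the code in assembler_mips.cc.

```cpp
#include <cstdint>
#include <cstdio>

// True if the offset fits the signed 16-bit immediate of a MIPS sw/sh/sb.
static bool FitsInt16(int32_t offset) {
  return offset >= -0x8000 && offset <= 0x7FFF;
}

// Prints the sequence a StoreToOffset-like helper might emit; "at" stands in
// for the assembler's scratch register.
static void EmitStoreToOffset(const char* store, const char* src,
                              const char* base, int32_t offset) {
  if (FitsInt16(offset)) {
    std::printf("%s %s, %d(%s)\n", store, src, offset, base);
    return;
  }
  // Keep the low 16 bits as the store's sign-extended displacement and move
  // the compensated high half into the scratch register via lui/addu.
  int32_t low = static_cast<int16_t>(offset & 0xFFFF);
  uint32_t high =
      (static_cast<uint32_t>(offset) - static_cast<uint32_t>(low)) >> 16;
  std::printf("lui  at, 0x%x\n", static_cast<unsigned>(high));
  std::printf("addu at, at, %s\n", base);
  std::printf("%s %s, %d(at)\n", store, src, low);
}

int main() {
  EmitStoreToOffset("sw", "a3", "a1", 0x7FF8);  // in range: one instruction
  EmitStoreToOffset("sw", "a3", "a1", 0x8000);  // out of range: lui/addu/sw
  return 0;
}
```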
|
| /art/compiler/utils/mips64/ |
| D | assembler_mips64_test.cc |
|     2081  TEST_F(AssemblerMIPS64Test, StoreToOffset) { in TEST_F() argument
|     2082  __ StoreToOffset(mips64::kStoreByte, mips64::A0, mips64::A0, 0); in TEST_F() local
|     2083  __ StoreToOffset(mips64::kStoreByte, mips64::A0, mips64::A1, 0); in TEST_F() local
|     2084  __ StoreToOffset(mips64::kStoreByte, mips64::A0, mips64::A1, 1); in TEST_F() local
|     2085  __ StoreToOffset(mips64::kStoreByte, mips64::A0, mips64::A1, 256); in TEST_F() local
|     2086  __ StoreToOffset(mips64::kStoreByte, mips64::A0, mips64::A1, 1000); in TEST_F() local
|     2087  __ StoreToOffset(mips64::kStoreByte, mips64::A0, mips64::A1, 0x7FFF); in TEST_F() local
|     2088  __ StoreToOffset(mips64::kStoreByte, mips64::A0, mips64::A1, 0x8000); in TEST_F() local
|     2089  __ StoreToOffset(mips64::kStoreByte, mips64::A0, mips64::A1, 0x8001); in TEST_F() local
|     2090  __ StoreToOffset(mips64::kStoreByte, mips64::A0, mips64::A1, 0x10000); in TEST_F() local
|     [all …]
|
| D | assembler_mips64.cc | 3590 void Mips64Assembler::StoreToOffset(StoreOperandType type, in StoreToOffset() function in art::mips64::Mips64Assembler
|
| /art/compiler/optimizing/ |
| D | code_generator_mips64.cc |
|     1067  __ StoreToOffset(store_type, in Exchange() local
|     1071  __ StoreToOffset(store_type, TMP, SP, index1 + stack_offset); in Exchange() local
|     1129  __ StoreToOffset(kStoreDoubleword, reg, SP, ofs); in GenerateFrameEntry() local
|     1147  __ StoreToOffset(kStoreDoubleword, kMethodRegisterArgument, SP, kCurrentMethodStackOffset); in GenerateFrameEntry() local
|     1152  __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag()); in GenerateFrameEntry() local
|     1341  __ StoreToOffset(store_type, in MoveLocation() local
|     1370  __ StoreToOffset(store_type, gpr, SP, destination.GetStackIndex()); in MoveLocation() local
|     1377  __ StoreToOffset(kStoreWord, TMP, SP, destination.GetStackIndex()); in MoveLocation() local
|     1380  __ StoreToOffset(kStoreDoubleword, TMP, SP, destination.GetStackIndex()); in MoveLocation() local
|     1448  __ StoreToOffset(store_type, reg_loc.AsRegister<GpuRegister>(), SP, mem_loc.GetStackIndex()); in SwapLocations() local
|     [all …]
|
| D | code_generator_mips.cc |
|     1181  __ StoreToOffset(kStoreWord, TMP, SP, offset); in EmitSwap() local
|     1193  __ StoreToOffset(kStoreWord, TMP, SP, offset_l); in EmitSwap() local
|     1196  __ StoreToOffset(kStoreWord, TMP, SP, offset_h); in EmitSwap() local
|     1247  __ StoreToOffset(kStoreWord, in Exchange() local
|     1251  __ StoreToOffset(kStoreWord, TMP, SP, index1 + stack_offset); in Exchange() local
|     1332  __ StoreToOffset(kStoreWord, reg, SP, ofs); in GenerateFrameEntry() local
|     1349  __ StoreToOffset(kStoreWord, kMethodRegisterArgument, SP, kCurrentMethodStackOffset); in GenerateFrameEntry() local
|     1354  __ StoreToOffset(kStoreWord, ZERO, SP, GetStackOffsetOfShouldDeoptimizeFlag()); in GenerateFrameEntry() local
|     1496  __ StoreToOffset(kStoreDoubleword, source.AsRegisterPairLow<Register>(), SP, dst_offset); in MoveLocation() local
|     1503  __ StoreToOffset(kStoreWord, TMP, SP, dst_offset); in MoveLocation() local
|     [all …]
|
| D | intrinsics_mips64.cc |
|     1756  __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0); in VisitStringGetCharsNoCheck() local
|     2319  __ StoreToOffset(kStoreWord, in, out, info.value_offset); in VisitIntegerValueOf() local
|     2342  __ StoreToOffset(kStoreWord, ZERO, TR, offset); in VisitThreadInterrupted() local
|
| D | intrinsics_mips.cc |
|     2171  __ StoreToOffset(kStoreHalfword, TMP, dstPtr, 0); in VisitStringGetCharsNoCheck() local
|     2658  __ StoreToOffset(kStoreWord, in, out, info.value_offset); in VisitIntegerValueOf() local
|     2681  __ StoreToOffset(kStoreWord, ZERO, TR, offset); in VisitThreadInterrupted() local
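The optimizing-compiler hits above (GenerateFrameEntry, EmitSwap, MoveLocation, the intrinsics) all funnel stack and field traffic through StoreToOffset with a base register plus a computed offset. A minimal sketch of that calling pattern, with a hypothetical MiniAssembler standing in for the real MIPS assemblers; the register numbers, callee-save set, and frame layout below are invented for illustration only.

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical stand-ins for the real assembler types; this one just records
// each requested store instead of emitting machine code.
enum StoreOperandType { kStoreWord, kStoreDoubleword };

struct MiniAssembler {
  void StoreToOffset(StoreOperandType type, int reg, int base, int32_t offset) {
    std::printf("store%s r%d -> [r%d %+d]\n",
                type == kStoreDoubleword ? "64" : "32", reg, base, offset);
  }
};

int main() {
  constexpr int kSp = 29;                                  // made-up SP number
  const std::vector<int> callee_saves = {16, 17, 18, 19};  // made-up save set
  MiniAssembler assm;

  // Frame-entry style spill loop: walk down from the top of the frame and
  // store each callee-saved register into its SP-relative slot.
  int32_t frame_size = 64;
  int32_t ofs = frame_size;
  for (int reg : callee_saves) {
    ofs -= 4;
    assm.StoreToOffset(kStoreWord, reg, kSp, ofs);
  }

  // And a "store the current method pointer at the bottom of the frame" step,
  // again with an invented register number and offset.
  assm.StoreToOffset(kStoreDoubleword, 4, kSp, 0);
  return 0;
}
```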
|
| /art/compiler/utils/arm/ |
| D | assembler_arm_vixl.cc | 244 void ArmVIXLAssembler::StoreToOffset(StoreOperandType type, in StoreToOffset() function in art::arm::ArmVIXLAssembler
|
| /art/compiler/utils/arm64/ |
| D | jni_macro_assembler_arm64.cc | 113 void Arm64JNIMacroAssembler::StoreToOffset(XRegister source, XRegister base, int32_t offset) { in StoreToOffset() function in art::arm64::Arm64JNIMacroAssembler
|