/art/compiler/optimizing/

  code_generator_mips.cc
      229  __ LoadConst32(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());  in EmitNativeCode()
      288  __ LoadConst32(calling_convention.GetRegisterAt(0), string_index);  in EmitNativeCode()
      917  __ LoadConst32(dst, value);  in MoveConstant()
      963  __ LoadConst32(dst, value);  in MoveConstant()
     1099  __ LoadConst32(AT, mirror::Class::kStatusInitialized);  in GenerateClassInitializationCheck()
     1281  __ LoadConst32(TMP, low);  in HandleBinaryOp()
     1290  __ LoadConst32(TMP, high);  in HandleBinaryOp()
     1302  __ LoadConst32(TMP, low);  in HandleBinaryOp()
     1311  __ LoadConst32(TMP, high);  in HandleBinaryOp()
     1321  __ LoadConst32(TMP, low);  in HandleBinaryOp()
     [all …]

  intrinsics_mips.cc
      285  __ LoadConst32(AT, 0x00FF00FF);  in GenReverse()
      296  __ LoadConst32(AT, 0x0F0F0F0F);  in GenReverse()
      302  __ LoadConst32(AT, 0x33333333);  in GenReverse()
      308  __ LoadConst32(AT, 0x55555555);  in GenReverse()
      344  __ LoadConst32(AT, 0x00FF00FF);  in GenReverse()
      362  __ LoadConst32(AT, 0x0F0F0F0F);  in GenReverse()
      373  __ LoadConst32(AT, 0x33333333);  in GenReverse()
      384  __ LoadConst32(AT, 0x55555555);  in GenReverse()
      542  __ LoadConst32(TMP, 32);  in GenNumberOfTrailingZeroes()
      549  __ LoadConst32(TMP, 32);  in GenNumberOfTrailingZeroes()
     [all …]

  code_generator_mips64.cc
      188  __ LoadConst32(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());  in EmitNativeCode()
      243  __ LoadConst32(calling_convention.GetRegisterAt(0), string_index);  in EmitNativeCode()
      689  __ LoadConst32(gpr, value);  in MoveLocation()
      768  __ LoadConst32(gpr, value);  in MoveLocation()
      862  __ LoadConst32(location.AsRegister<GpuRegister>(), value);  in MoveConstant()
      982  __ LoadConst32(AT, mirror::Class::kStatusInitialized);  in GenerateClassInitializationCheck()
     1722  __ LoadConst32(rhs, value);  in VisitCompare()
     1739  __ LoadConst32(res, 0);  in VisitCompare()
     1743  __ LoadConst32(res, -1);  in VisitCompare()
     1745  __ LoadConst32(res, 1);  in VisitCompare()
     [all …]

  intrinsics_mips64.cc
     1496  __ LoadConst32(tmp_reg, std::numeric_limits<uint16_t>::max());  in GenerateStringIndexOf()
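At the HandleBinaryOp() sites in code_generator_mips.cc above, a 64-bit operand is materialized as two 32-bit halves, each loaded with a separate LoadConst32() call. Below is a minimal standalone sketch of that low/high split; the Low32Bits/High32Bits helpers are defined locally for illustration and are not taken from ART.

    #include <cstdint>
    #include <cstdio>

    // Hypothetical helpers, defined here only for the demo: extract the two
    // 32-bit halves that would each be handed to a LoadConst32()-style call.
    static int32_t Low32Bits(int64_t value)  { return static_cast<int32_t>(value); }
    static int32_t High32Bits(int64_t value) { return static_cast<int32_t>(value >> 32); }

    int main() {
      int64_t imm = INT64_C(0x123456789ABCDEF0);
      int32_t low  = Low32Bits(imm);   // 0x9ABCDEF0, loaded into the low half's register
      int32_t high = High32Bits(imm);  // 0x12345678, loaded into the high half's register
      std::printf("low=0x%08x high=0x%08x\n",
                  static_cast<unsigned>(low), static_cast<unsigned>(high));
      return 0;
    }

The same split shows up in MipsAssembler::LoadConst64() in the listing below, which calls LoadConst32(reg_lo, low) and LoadConst32(reg_hi, high).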
/art/compiler/utils/mips/

  assembler_mips.cc
     1359  void MipsAssembler::LoadConst32(Register rd, int32_t value) {  definition (art::mips::MipsAssembler)
     1376  LoadConst32(reg_lo, low);  in LoadConst64()
     1378  LoadConst32(reg_hi, high);  in LoadConst64()
     1390  LoadConst32(AT, offset);  in StoreConst32ToOffset()
     1398  LoadConst32(temp, value);  in StoreConst32ToOffset()
     1410  LoadConst32(AT, offset);  in StoreConst64ToOffset()
     1420  LoadConst32(temp, low);  in StoreConst64ToOffset()
     1427  LoadConst32(temp, high);  in StoreConst64ToOffset()
     1437  LoadConst32(temp, value);  in LoadSConst32()
     1448  LoadConst32(temp, low);  in LoadDConst64()
     [all …]

  assembler_mips.h
      356  void LoadConst32(Register rd, int32_t value);  declaration
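LoadConst32(), declared at assembler_mips.h:356 and defined at assembler_mips.cc:1359, materializes an arbitrary 32-bit immediate in a register. On MIPS this is conventionally done in at most two instructions. The sketch below shows that conventional selection logic (addiu / ori / lui+ori) as an assumption about the general technique; it is not a copy of ART's implementation, and EmitLoadConst32 and the register names are invented for the demo.

    #include <cstdint>
    #include <cstdio>

    // Illustrative only: conventional MIPS sequences for loading a 32-bit
    // constant into register `rd`:
    //   addiu rd, $zero, imm    if imm fits a signed 16-bit immediate
    //   ori   rd, $zero, imm    if imm fits an unsigned 16-bit immediate
    //   lui (+ ori)             otherwise: upper half first, then lower half
    static void EmitLoadConst32(const char* rd, int32_t value) {
      unsigned low  = static_cast<uint32_t>(value) & 0xFFFFu;
      unsigned high = static_cast<uint32_t>(value) >> 16;
      if (value >= -32768 && value <= 32767) {
        std::printf("addiu %s, $zero, %d\n", rd, value);
      } else if (value >= 0 && value <= 65535) {
        std::printf("ori   %s, $zero, 0x%04x\n", rd, low);
      } else {
        std::printf("lui   %s, 0x%04x\n", rd, high);
        if (low != 0u) {
          std::printf("ori   %s, %s, 0x%04x\n", rd, rd, low);
        }
      }
    }

    int main() {
      EmitLoadConst32("$t8", 32);          // one addiu, like the TMP loads in GenNumberOfTrailingZeroes()
      EmitLoadConst32("$at", 0x00FF00FF);  // lui + ori, like the masks in GenReverse()
      return 0;
    }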
/art/compiler/utils/mips64/

  assembler_mips64.cc
     1035  void Mips64Assembler::LoadConst32(GpuRegister rd, int32_t value) {  definition (art::mips64::Mips64Assembler)
     1808  LoadConst32(AT, offset & ~(kMips64DoublewordSize - 1));  in LoadFromOffset()
     1853  LoadConst32(AT, offset & ~(kMips64DoublewordSize - 1));  in LoadFpuFromOffset()
     1911  LoadConst32(AT, offset & ~(kMips64DoublewordSize - 1));  in StoreToOffset()
     1948  LoadConst32(AT, offset & ~(kMips64DoublewordSize - 1));  in StoreFpuToOffset()
     2108  LoadConst32(scratch.AsGpuRegister(), imm);  in StoreImmediateToFrame()
     2347  LoadConst32(out_reg.AsGpuRegister(), 0);  in CreateHandleScopeEntry()
     2388  LoadConst32(out_reg.AsGpuRegister(), 0);  in LoadReferenceFromHandleScope()

  assembler_mips64.h
      324  void LoadConst32(GpuRegister rd, int32_t value);  declaration
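The LoadFromOffset()/StoreToOffset() call sites in assembler_mips64.cc pass `offset & ~(kMips64DoublewordSize - 1)` to LoadConst32() when a memory offset does not fit an instruction's 16-bit immediate field. The standalone sketch below only reproduces that masking arithmetic; the surrounding strategy (folding the aligned part into the base register) is an assumption read off the call sites, and kMips64DoublewordSize is redefined locally (a MIPS64 doubleword is 8 bytes).

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    // Redefined here for a standalone demo; mirrors the 8-byte doubleword size.
    static constexpr int32_t kMips64DoublewordSize = 8;

    int main() {
      int32_t offset = 0x12345;  // too wide for a 16-bit load/store immediate
      // The doubleword-aligned part is what LoadConst32(AT, ...) receives at the
      // call sites above; the small remainder (0..7) can then stay in the memory
      // instruction's immediate field once the aligned part has been folded into
      // the base register (assumed strategy, not copied from ART).
      int32_t aligned   = offset & ~(kMips64DoublewordSize - 1);
      int32_t remainder = offset & (kMips64DoublewordSize - 1);
      assert(aligned + remainder == offset);
      std::printf("offset=0x%x aligned=0x%x remainder=%d\n",
                  static_cast<unsigned>(offset), static_cast<unsigned>(aligned), remainder);
      return 0;
    }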