/art/compiler/utils/x86/ |
D | assembler_x86_test.cc |
    274 GetAssembler()->LoadLongConstant(x86::XMM0, 51); in TEST_F()
    310 GetAssembler()->repne_scasb(); in TEST_F()
    316 GetAssembler()->repne_scasw(); in TEST_F()
    322 GetAssembler()->repe_cmpsb(); in TEST_F()
    328 GetAssembler()->repe_cmpsw(); in TEST_F()
    334 GetAssembler()->repe_cmpsl(); in TEST_F()
    340 GetAssembler()->rep_movsb(); in TEST_F()
    346 GetAssembler()->rep_movsw(); in TEST_F()
    450 GetAssembler()->cmovl(x86::kEqual, x86::Register(x86::EAX), x86::Address( in TEST_F()
    452 GetAssembler()->cmovl(x86::kNotEqual, x86::Register(x86::EDI), x86::Address( in TEST_F()
    [all …]
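These test matches all follow the same shape: the TEST_F body asks the fixture for the x86 assembler via GetAssembler(), emits one or more instructions into its buffer, and the fixture then compares the encoded bytes against expected assembly text. A minimal sketch of that pattern; the DriverStr() comparison call is an assumption about the fixture's driver, not quoted from the file:

    // Sketch of the TEST_F pattern above. Assumes an AssemblerX86Test fixture that
    // exposes GetAssembler() and a DriverStr(expected, name) text-comparison driver.
    TEST_F(AssemblerX86Test, RepneScasb) {
      GetAssembler()->repne_scasb();           // emit 'repne scasb' into the buffer
      const char* expected = "repne scasb\n";  // disassembly the fixture should see
      DriverStr(expected, "repne_scasb");      // assumed helper: assemble, disassemble, compare
    }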
|
/art/compiler/optimizing/ |
D | intrinsics_arm_vixl.cc |
    63 ArmVIXLAssembler* IntrinsicCodeGeneratorARMVIXL::GetAssembler() { in GetAssembler() function in art::arm::IntrinsicCodeGeneratorARMVIXL
    64 return codegen_->GetAssembler(); in GetAssembler()
    93 ArmVIXLAssembler* assembler = down_cast<ArmVIXLAssembler*>(codegen->GetAssembler()); in EmitNativeCode()
    182 ArmVIXLAssembler* assembler = arm_codegen->GetAssembler(); in EmitNativeCode()
    251 assembler_(codegen->GetAssembler()), in IntrinsicLocationsBuilderARMVIXL()
    305 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); in VisitDoubleDoubleToRawLongBits()
    308 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); in VisitDoubleLongBitsToDouble()
    319 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); in VisitFloatFloatToRawIntBits()
    322 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); in VisitFloatIntBitsToFloat()
    349 ArmVIXLAssembler* assembler = codegen->GetAssembler(); in GenNumberOfLeadingZeros()
    [all …]
|
D | intrinsics_mips64.cc |
    41 Mips64Assembler* IntrinsicCodeGeneratorMIPS64::GetAssembler() { in GetAssembler() function in art::mips64::IntrinsicCodeGeneratorMIPS64
    42 return reinterpret_cast<Mips64Assembler*>(codegen_->GetAssembler()); in GetAssembler()
    49 #define __ codegen->GetAssembler()->
    168 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); in VisitDoubleDoubleToRawLongBits()
    177 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); in VisitFloatFloatToRawIntBits()
    204 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); in VisitDoubleLongBitsToDouble()
    213 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); in VisitFloatIntBitsToFloat()
    254 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler()); in VisitIntegerReverseBytes()
    263 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler()); in VisitLongReverseBytes()
    272 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler()); in VisitShortReverseBytes()
    [all …]
|
D | intrinsics_x86_64.cc |
    47 X86_64Assembler* IntrinsicCodeGeneratorX86_64::GetAssembler() { in GetAssembler() function in art::x86_64::IntrinsicCodeGeneratorX86_64
    48 return down_cast<X86_64Assembler*>(codegen_->GetAssembler()); in GetAssembler()
    72 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT
    165 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); in VisitDoubleDoubleToRawLongBits()
    168 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); in VisitDoubleLongBitsToDouble()
    179 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); in VisitFloatFloatToRawIntBits()
    182 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); in VisitFloatIntBitsToFloat()
    220 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler()); in VisitIntegerReverseBytes()
    228 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt64, GetAssembler()); in VisitLongReverseBytes()
    236 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler()); in VisitShortReverseBytes()
    [all …]
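The intrinsics files repeat one pattern, visible in the first three matches above: the per-backend IntrinsicCodeGenerator narrows the generic Assembler* held by the codegen to its concrete type, and a local `__` macro turns subsequent emission into assembly-like syntax. Reconstructed from those matches; the surrounding declarations are assumed context:

    // Reconstructed from the matches above; the enclosing class and the codegen_
    // member are assumed context, not quoted in full here.
    X86_64Assembler* IntrinsicCodeGeneratorX86_64::GetAssembler() {
      // codegen_ exposes the backend assembler as a plain Assembler*; narrow it.
      return down_cast<X86_64Assembler*>(codegen_->GetAssembler());
    }

    // Emission shorthand: '__ movl(...)' expands to a call on the x86-64 assembler.
    #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())->  // NOLINT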
|
D | optimizing_cfi_test.cc |
    69 code_gen_->GetAssembler()->cfi().SetEnabled(true); in SetUpFrame()
    109 Assembler* opt_asm = code_gen_->GetAssembler(); in Check()
    213 ->GetAssembler())->GetVIXLAssembler()-> in TEST_ISA()
    243 #define __ down_cast<mips::MipsAssembler*>(GetCodeGenerator()->GetAssembler())-> in TEST_F()
    273 #define __ down_cast<mips64::Mips64Assembler*>(GetCodeGenerator()->GetAssembler())-> in TEST_F()
|
D | code_generator_arm_vixl.cc |
    354 arm_codegen->GetAssembler()->StoreRegisterList(core_spills, orig_offset); in SaveLiveRegisters()
    388 arm_codegen->GetAssembler()->LoadRegisterList(core_spills, orig_offset); in RestoreLiveRegisters()
    942 arm_codegen->GetAssembler()->LoadFromOffset(kLoadWord, temp_, obj_, monitor_offset); in EmitNativeCode()
    1084 arm_codegen->GetAssembler()->LoadFromOffset(kLoadWord, temp1_, obj_, monitor_offset); in EmitNativeCode()
    1158 arm_codegen->GetAssembler()->PoisonHeapReference(expected); in EmitNativeCode()
    1163 arm_codegen->GetAssembler()->PoisonHeapReference(value); in EmitNativeCode()
    1184 arm_codegen->GetAssembler()->UnpoisonHeapReference(expected); in EmitNativeCode()
    1189 arm_codegen->GetAssembler()->UnpoisonHeapReference(value); in EmitNativeCode()
    2419 #define __ reinterpret_cast<ArmVIXLAssembler*>(GetAssembler())->GetVIXLAssembler()-> // NOLINT
    2423 GetAssembler()->FinalizeCode(); in Finalize()
    [all …]
|
D | intrinsics_mips.cc |
    41 MipsAssembler* IntrinsicCodeGeneratorMIPS::GetAssembler() { in GetAssembler() function in art::mips::IntrinsicCodeGeneratorMIPS
    42 return reinterpret_cast<MipsAssembler*>(codegen_->GetAssembler()); in GetAssembler()
    61 #define __ codegen->GetAssembler()->
    184 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); in VisitDoubleDoubleToRawLongBits()
    193 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); in VisitFloatFloatToRawIntBits()
    225 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); in VisitDoubleLongBitsToDouble()
    234 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); in VisitFloatIntBitsToFloat()
    411 GetAssembler()); in VisitIntegerReverseBytes()
    425 GetAssembler()); in VisitLongReverseBytes()
    439 GetAssembler()); in VisitShortReverseBytes()
    [all …]
|
D | intrinsics_x86.cc |
    54 X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() { in GetAssembler() function in art::x86::IntrinsicCodeGeneratorX86
    55 return down_cast<X86Assembler*>(codegen_->GetAssembler()); in GetAssembler()
    79 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> // NOLINT
    238 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); in VisitDoubleDoubleToRawLongBits()
    241 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler()); in VisitDoubleLongBitsToDouble()
    252 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); in VisitFloatFloatToRawIntBits()
    255 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler()); in VisitFloatIntBitsToFloat()
    304 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler()); in VisitIntegerReverseBytes()
    320 X86Assembler* assembler = GetAssembler(); in VisitLongReverseBytes()
    333 GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler()); in VisitShortReverseBytes()
    [all …]
|
D | code_generator_arm64.h |
    261 Arm64Assembler* GetAssembler() const { return assembler_; } in GetAssembler() function
    262 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler()
    392 Arm64Assembler* GetAssembler() const;
    394 return GetAssembler()->GetVIXLAssembler(); in GetVIXLAssembler()
    442 Arm64Assembler* GetAssembler() OVERRIDE { return &assembler_; } in GetAssembler() function
    443 const Arm64Assembler& GetAssembler() const OVERRIDE { return assembler_; } in GetAssembler() function
    444 vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler()
    846 inline Arm64Assembler* ParallelMoveResolverARM64::GetAssembler() const { in GetAssembler() function
    847 return codegen_->GetAssembler(); in GetAssembler()
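The arm64 header matches show the accessor layering used by the VIXL-backed backends: the code generator owns an Arm64Assembler and hands it out through GetAssembler(), GetVIXLAssembler() forwards one level further to the raw VIXL MacroAssembler, and the parallel-move resolver simply delegates back to the codegen. Reconstructed from those matches; the enclosing class bodies are assumed context:

    // Reconstructed from the header matches above; the enclosing classes and the
    // assembler_ / codegen_ members are assumed context.
    Arm64Assembler* GetAssembler() OVERRIDE { return &assembler_; }
    const Arm64Assembler& GetAssembler() const OVERRIDE { return assembler_; }

    // Raw VIXL emission goes one level deeper, through the ART wrapper.
    vixl::aarch64::MacroAssembler* GetVIXLAssembler() {
      return GetAssembler()->GetVIXLAssembler();
    }

    // The parallel move resolver has no assembler of its own; it borrows the codegen's.
    inline Arm64Assembler* ParallelMoveResolverARM64::GetAssembler() const {
      return codegen_->GetAssembler();
    }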
|
D | intrinsics_x86_64.h | 72 X86_64Assembler* GetAssembler();
|
D | intrinsics_arm_vixl.h | 72 ArmVIXLAssembler* GetAssembler();
|
D | intrinsics_mips.h | 76 MipsAssembler* GetAssembler();
|
D | intrinsics_x86.h | 72 X86Assembler* GetAssembler();
|
D | intrinsics_mips64.h | 72 Mips64Assembler* GetAssembler();
|
D | intrinsics_utils.h | 51 Assembler* assembler = codegen->GetAssembler(); in EmitNativeCode()
|
D | code_generator_arm_vixl.h |
    247 ArmVIXLAssembler* GetAssembler() const;
    315 ArmVIXLAssembler* GetAssembler() const { return assembler_; } in GetAssembler() function
    316 ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler()
    454 ArmVIXLAssembler* GetAssembler() OVERRIDE { return &assembler_; } in GetAssembler() function
    456 const ArmVIXLAssembler& GetAssembler() const OVERRIDE { return assembler_; } in GetAssembler() function
    458 ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); } in GetVIXLAssembler()
|
D | code_generator_x86_64.h |
    137 X86_64Assembler* GetAssembler() const;
    203 X86_64Assembler* GetAssembler() const { return assembler_; } in GetAssembler() function
    343 X86_64Assembler* GetAssembler() OVERRIDE { in GetAssembler() function
    347 const X86_64Assembler& GetAssembler() const OVERRIDE { in GetAssembler() function
|
D | code_generator_mips64.h |
    147 Mips64Assembler* GetAssembler() const;
    222 Mips64Assembler* GetAssembler() const { return assembler_; } in GetAssembler() function
    374 Mips64Assembler* GetAssembler() OVERRIDE { return &assembler_; } in GetAssembler() function
    375 const Mips64Assembler& GetAssembler() const OVERRIDE { return assembler_; } in GetAssembler() function
|
D | code_generator_x86.h |
    138 X86Assembler* GetAssembler() const;
    201 X86Assembler* GetAssembler() const { return assembler_; } in GetAssembler() function
    364 X86Assembler* GetAssembler() OVERRIDE { in GetAssembler() function
    368 const X86Assembler& GetAssembler() const OVERRIDE { in GetAssembler() function
|
D | code_generator.cc |
    339 start_offset_ = codegen_.GetAssembler().CodeSize(); in DisassemblyScope()
    347 instruction_, start_offset_, codegen_.GetAssembler().CodeSize()); in ~DisassemblyScope()
    365 code_start = GetAssembler()->CodeSize(); in GenerateSlowPaths()
    371 disasm_info_->AddSlowPathInterval(slow_path, code_start, GetAssembler()->CodeSize()); in GenerateSlowPaths()
    393 size_t frame_start = GetAssembler()->CodeSize(); in Compile()
    395 DCHECK_EQ(GetAssembler()->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size_)); in Compile()
    397 disasm_info_->SetFrameEntryInterval(frame_start, GetAssembler()->CodeSize()); in Compile()
    438 size_t code_size = GetAssembler()->CodeSize(); in Finalize()
    442 GetAssembler()->FinalizeInstructions(code); in Finalize()
    978 uint32_t native_pc = GetAssembler()->CodePosition(); in RecordPcInfo()
    [all …]
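The code_generator.cc matches tie the bookkeeping to the assembler's running buffer size: disassembly intervals, slow-path intervals, the frame-entry interval, and recorded PCs are all expressed as GetAssembler()->CodeSize() (or CodePosition()) snapshots. A hedged reconstruction of the RAII scope behind the first two matches; the GetDisassemblyInformation() accessor and the AddInstructionInterval() name are assumptions modelled on the AddSlowPathInterval() match:

    // Hedged reconstruction of DisassemblyScope; the disassembly-info accessor and
    // AddInstructionInterval() are assumptions, by analogy with AddSlowPathInterval().
    class DisassemblyScope {
     public:
      DisassemblyScope(HInstruction* instruction, const CodeGenerator& codegen)
          : codegen_(codegen),
            instruction_(instruction),
            start_offset_(codegen_.GetAssembler().CodeSize()) {}  // snapshot before emission

      ~DisassemblyScope() {
        // Record the [start, end) byte range this instruction's code occupies.
        codegen_.GetDisassemblyInformation()->AddInstructionInterval(
            instruction_, start_offset_, codegen_.GetAssembler().CodeSize());
      }

     private:
      const CodeGenerator& codegen_;
      HInstruction* instruction_;
      size_t start_offset_;
    };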
|
D | code_generator_mips.h |
    150 MipsAssembler* GetAssembler() const;
    225 MipsAssembler* GetAssembler() const { return assembler_; } in GetAssembler() function
    395 MipsAssembler* GetAssembler() OVERRIDE { return &assembler_; } in GetAssembler() function
    396 const MipsAssembler& GetAssembler() const OVERRIDE { return assembler_; } in GetAssembler() function
|
D | intrinsics_arm64.cc |
    187 codegen->GetAssembler()->MaybeUnpoisonHeapReference(tmp_reg); in EmitNativeCode()
    212 codegen->GetAssembler()->MaybePoisonHeapReference(tmp_reg); in EmitNativeCode()
    1064 codegen->GetAssembler()->PoisonHeapReference(temp.W()); in GenUnsafePut()
    1224 codegen->GetAssembler()->PoisonHeapReference(expected); in GenCas()
    1229 codegen->GetAssembler()->PoisonHeapReference(value); in GenCas()
    1249 codegen->GetAssembler()->UnpoisonHeapReference(expected); in GenCas()
    1254 codegen->GetAssembler()->UnpoisonHeapReference(value); in GenCas()
    2688 codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1); in VisitSystemArrayCopy()
    2689 codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp2); in VisitSystemArrayCopy()
    2698 codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3); in VisitSystemArrayCopy()
    [all …]
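In the arm64 intrinsics, GetAssembler() is reached mostly for heap-reference poisoning: GenCas() poisons the expected and new reference values before the compare-and-swap loop and unpoisons them afterwards, so the Java-visible registers stay clean. A hedged sketch of that bracketing; the guard condition and the elided loop are assumptions, not quoted from the file:

    // Hedged sketch of the poison/unpoison bracketing suggested by the GenCas()
    // matches above; the exact guard and the CAS loop itself are assumptions.
    if (kPoisonHeapReferences && type == DataType::Type::kReference) {
      codegen->GetAssembler()->PoisonHeapReference(expected);
      codegen->GetAssembler()->PoisonHeapReference(value);
    }

    // ... emit the exclusive-load / exclusive-store compare-and-swap loop here ...

    if (kPoisonHeapReferences && type == DataType::Type::kReference) {
      codegen->GetAssembler()->UnpoisonHeapReference(expected);
      codegen->GetAssembler()->UnpoisonHeapReference(value);
    }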
|
D | code_generator_arm64.cc |
    1048 arm64_codegen->GetAssembler()->PoisonHeapReference(expected); in EmitNativeCode()
    1053 arm64_codegen->GetAssembler()->PoisonHeapReference(value); in EmitNativeCode()
    1074 arm64_codegen->GetAssembler()->UnpoisonHeapReference(expected); in EmitNativeCode()
    1079 arm64_codegen->GetAssembler()->UnpoisonHeapReference(value); in EmitNativeCode()
    1533 GetAssembler()->cfi().AdjustCFAOffset(frame_size); in GenerateFrameEntry()
    1534 GetAssembler()->SpillRegisters(GetFramePreservedCoreRegisters(), in GenerateFrameEntry()
    1536 GetAssembler()->SpillRegisters(GetFramePreservedFPRegisters(), in GenerateFrameEntry()
    1550 GetAssembler()->cfi().RememberState(); in GenerateFrameExit()
    1553 GetAssembler()->UnspillRegisters(GetFramePreservedFPRegisters(), in GenerateFrameExit()
    1555 GetAssembler()->UnspillRegisters(GetFramePreservedCoreRegisters(), in GenerateFrameExit()
    [all …]
|
/art/compiler/utils/x86_64/ |
D | assembler_x86_64_test.cc |
    1004 GetAssembler()->cvtsi2ss(x86_64::XmmRegister(x86_64::XMM0), in TEST_F()
    1007 GetAssembler()->cvtsi2ss(x86_64::XmmRegister(x86_64::XMM0), in TEST_F()
    1016 GetAssembler()->cvtsi2sd(x86_64::XmmRegister(x86_64::XMM0), in TEST_F()
    1019 GetAssembler()->cvtsi2sd(x86_64::XmmRegister(x86_64::XMM0), in TEST_F()
    1084 GetAssembler()->repne_scasb(); in TEST_F()
    1090 GetAssembler()->repne_scasw(); in TEST_F()
    1096 GetAssembler()->rep_movsw(); in TEST_F()
    1609 GetAssembler()->psllw(x86_64::XmmRegister(x86_64::XMM0), x86_64::Immediate(1)); in TEST_F()
    1610 GetAssembler()->psllw(x86_64::XmmRegister(x86_64::XMM15), x86_64::Immediate(2)); in TEST_F()
    1616 GetAssembler()->pslld(x86_64::XmmRegister(x86_64::XMM0), x86_64::Immediate(1)); in TEST_F()
    [all …]
|
/art/compiler/utils/ |
D | jni_macro_assembler_test.h | 37 Ass* GetAssembler() { in GetAssembler() function
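The single match under /art/compiler/utils/ is the templated JNI macro-assembler test fixture, whose GetAssembler() hands the backend-specific assembler (the Ass template parameter) to the test body. A minimal sketch of that shape, assuming a gtest fixture holding the assembler in a unique_ptr; everything around the accessor is an assumption:

    // Minimal sketch, assuming a gtest fixture templated on the assembler type;
    // only the GetAssembler() accessor is taken from the match above.
    template <typename Ass>
    class JNIMacroAssemblerTest : public ::testing::Test {
     protected:
      Ass* GetAssembler() { return assembler_.get(); }  // backend assembler under test

      std::unique_ptr<Ass> assembler_;
    };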
|