| /art/compiler/trampolines/ |
| D | trampoline_compiler.cc |
|    54   Thumb2Assembler assembler(arena);  in CreateTrampoline() local
|    84   Arm64Assembler assembler(arena);  in CreateTrampoline() local
|   123   MipsAssembler assembler(arena);  in CreateTrampoline() local
|   155   Mips64Assembler assembler(arena);  in CreateTrampoline() local
|   187   X86Assembler assembler(arena);  in CreateTrampoline() local
|   208   x86_64::X86_64Assembler assembler(arena);  in CreateTrampoline() local
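
The matches above show CreateTrampoline() constructing a different arena-backed assembler for each target ISA and emitting a short jump to the runtime entry point. A minimal standalone sketch of that dispatch shape, using hypothetical stand-in types rather than ART's real assembler classes:

```cpp
// Illustrative sketch only: FakeAssembler and CreateTrampolineSketch are
// hypothetical stand-ins for ART's per-ISA assemblers and CreateTrampoline().
#include <cstdint>
#include <memory>
#include <vector>

enum class InstructionSet { kThumb2, kArm64, kMips, kMips64, kX86, kX86_64 };

struct FakeAssembler {  // stands in for Thumb2Assembler, Arm64Assembler, ...
  std::vector<uint8_t> code;
  void EmitJumpTo(uint32_t entry_point_offset) {
    // A real assembler emits an ISA-specific jump to the runtime entry point;
    // this sketch just records the offset bytes.
    for (int i = 0; i < 4; ++i) {
      code.push_back(static_cast<uint8_t>((entry_point_offset >> (8 * i)) & 0xff));
    }
  }
};

std::unique_ptr<std::vector<uint8_t>> CreateTrampolineSketch(InstructionSet isa,
                                                             uint32_t entry_point_offset) {
  FakeAssembler assembler;  // one local assembler per call, as in the matches above
  switch (isa) {
    case InstructionSet::kThumb2:
    case InstructionSet::kArm64:
    case InstructionSet::kMips:
    case InstructionSet::kMips64:
    case InstructionSet::kX86:
    case InstructionSet::kX86_64:
      assembler.EmitJumpTo(entry_point_offset);
      break;
  }
  return std::make_unique<std::vector<uint8_t>>(assembler.code);
}
```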
|
| /art/compiler/optimizing/ |
| D | intrinsics_mips64.cc |
|   149   static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {  in MoveFPToInt()
|   186   static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {  in MoveIntToFP()
|   225   Mips64Assembler* assembler) {  in GenReverseBytes()
|   277   Mips64Assembler* assembler) {  in GenNumberOfLeadingZeroes()
|   308   Mips64Assembler* assembler) {  in GenNumberOfTrailingZeroes()
|   345   Mips64Assembler* assembler) {  in GenReverse()
|   388   static void MathAbsFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {  in MathAbsFP()
|   425   static void GenAbsInteger(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {  in GenAbsInteger()
|   461   Mips64Assembler* assembler) {  in GenMinMaxFP()
|   574   Mips64Assembler* assembler) {  in GenMinMax()
|   [all …]
|
| D | intrinsics_arm.cc |
|    84   static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {  in MoveFPToInt()
|    96   static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {  in MoveIntToFP()
|   154   ArmAssembler* assembler) {  in GenNumberOfLeadingZeros()
|   196   ArmAssembler* assembler) {  in GenNumberOfTrailingZeros()
|   243   static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {  in MathAbsFP()
|   283   ArmAssembler* assembler) {  in GenAbsInteger()
|   331   ArmAssembler* assembler) {  in GenMinMax()
|   374   ArmAssembler* assembler = GetAssembler();  in VisitMathSqrt() local
|   384   ArmAssembler* assembler = GetAssembler();  in VisitMemoryPeekByte() local
|   395   ArmAssembler* assembler = GetAssembler();  in VisitMemoryPeekIntNative() local
|   [all …]
|
| D | intrinsics_x86_64.cc |
|    98   static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) {  in MoveFPToInt()
|   104   static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) {  in MoveIntToFP()
|   148   X86_64Assembler* assembler) {  in GenReverseBytes()
|   209   X86_64Assembler* assembler,  in MathAbsFP()
|   253   static void GenAbsInteger(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) {  in GenAbsInteger()
|   294   X86_64Assembler* assembler,  in GenMinMaxFP()
|   423   X86_64Assembler* assembler) {  in GenMinMax()
|   552   X86_64Assembler* assembler,  in GenSSE41FPToFPIntrinsic()
|   631   X86_64Assembler* assembler = GetAssembler();  in VisitMathRoundFloat() local
|   681   X86_64Assembler* assembler = GetAssembler();  in VisitMathRoundDouble() local
|   [all …]
|
| D | intrinsics_x86.cc |
|   111   static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {  in MoveFPToInt()
|   126   static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {  in MoveIntToFP()
|   196   X86Assembler* assembler) {  in GenReverseBytes()
|   235   X86Assembler* assembler = GetAssembler();  in VisitLongReverseBytes() local
|   275   X86Assembler* assembler,  in MathAbsFP()
|   333   static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) {  in GenAbsInteger()
|   361   static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) {  in GenAbsLong()
|   406   X86Assembler* assembler,  in GenMinMaxFP()
|   569   X86Assembler* assembler) {  in GenMinMax()
|   732   X86Assembler* assembler,  in GenSSE41FPToFPIntrinsic()
|   [all …]
|
| D | intrinsics_mips.cc |
|   160   static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {  in MoveFPToInt()
|   202   static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {  in MoveIntToFP()
|   251   MipsAssembler* assembler) {  in GenReverse()
|   445   MipsAssembler* assembler) {  in GenNumberOfLeadingZeroes()
|   493   MipsAssembler* assembler) {  in GenNumberOfTrailingZeroes()
|   616   MipsAssembler* assembler) {  in GenBitCount()
|   748   static void MathAbsFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {  in MathAbsFP()
|   777   static void GenAbsInteger(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {  in GenAbsInteger()
|   827   MipsAssembler* assembler) {  in GenMinMaxFP()
|  1060   MipsAssembler* assembler) {  in GenMinMax()
|   [all …]
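
Each per-ISA intrinsics file above repeats the same helper signature: a static function taking the operand locations, a width flag, and a pointer to the target assembler. A simplified sketch of that shape, with hypothetical stand-ins for LocationSummary and the assembler classes:

```cpp
// Illustrative sketch of the recurring helper shape above (MoveFPToInt,
// GenAbsInteger, ...). LocationsSketch and IsaAssemblerSketch are hypothetical
// stand-ins, not ART's LocationSummary or assembler classes.
struct LocationsSketch {      // stands in for LocationSummary
  int in_reg;
  int out_reg;
};

struct IsaAssemblerSketch {   // stands in for MipsAssembler, X86_64Assembler, ...
  void AbsInt32(int out, int in) { /* would emit a 32-bit abs sequence */ }
  void AbsInt64(int out, int in) { /* would emit a 64-bit abs sequence */ }
};

// Mirrors the signature seen in every per-ISA file: (locations, is64bit, assembler).
static void GenAbsIntegerSketch(LocationsSketch* locations, bool is64bit,
                                IsaAssemblerSketch* assembler) {
  if (is64bit) {
    assembler->AbsInt64(locations->out_reg, locations->in_reg);
  } else {
    assembler->AbsInt32(locations->out_reg, locations->in_reg);
  }
}
```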
|
| D | intrinsics_utils.h |
|    51   Assembler* assembler = codegen->GetAssembler();  in EmitNativeCode() local
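
The single hit in intrinsics_utils.h is the shared slow path fetching the assembler from its code generator inside EmitNativeCode(). A hedged sketch of that pattern with hypothetical types:

```cpp
// Sketch of the shared slow path pulling the assembler from its code generator,
// as EmitNativeCode() does above. Both types here are hypothetical stand-ins.
struct SlowPathAssembler {
  void CallRuntime() { /* would emit the runtime call and the return branch */ }
};

struct CodeGeneratorSketch {
  SlowPathAssembler assembler_;
  SlowPathAssembler* GetAssembler() { return &assembler_; }
};

struct IntrinsicSlowPathSketch {
  void EmitNativeCode(CodeGeneratorSketch* codegen) {
    SlowPathAssembler* assembler = codegen->GetAssembler();  // the 'local' in the match above
    assembler->CallRuntime();
  }
};
```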
|
| D | intrinsics_arm.h |
|    37   ArmAssembler* assembler,  in IntrinsicLocationsBuilderARM()
|
| D | code_generator_x86_64.cc |
|  6682   X86_64Assembler* assembler = codegen_->GetAssembler();  in CreateJumpTable() local
|  6711   X86_64Assembler* assembler = GetAssembler();  in Finalize() local
|
| D | code_generator_x86.cc |
|  7258   X86Assembler* assembler = codegen_->GetAssembler();  in CreateJumpTable() local
|  7288   X86Assembler* assembler = GetAssembler();  in Finalize() local
|
| D | code_generator_arm.cc |
|  3991   ArmAssembler* assembler = codegen_->GetAssembler();  in CanEncodeConstantAsImmediate() local
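
The code generator hits above are CreateJumpTable() and Finalize(), where the backend grabs its assembler to fill jump-table entries and finish the code buffer. A simplified sketch of writing jump-table entries as label displacements (hypothetical types, not ART's actual fixup machinery):

```cpp
// Sketch: at Finalize() time, write each jump-table entry as the displacement
// from the table to its bound label. Hypothetical types, not ART's fixups.
#include <cstdint>
#include <vector>

struct BoundLabelSketch {
  int32_t offset;  // code-buffer offset the label was bound to
};

struct JumpTableSketch {
  int32_t table_offset;                         // where the table itself lives
  std::vector<const BoundLabelSketch*> targets;
};

static void WriteJumpTableEntries(const JumpTableSketch& table,
                                  std::vector<int32_t>* code_buffer) {
  for (const BoundLabelSketch* target : table.targets) {
    // Each entry is the distance from the start of the table to its target label.
    code_buffer->push_back(target->offset - table.table_offset);
  }
}
```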
|
| /art/compiler/utils/x86_64/ |
| D | assembler_x86_64_test.cc |
|   363   std::string shll_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shll_fn()
|   386   std::string shlq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shlq_fn()
|   409   std::string shrl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shrl_fn()
|   432   std::string shrq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shrq_fn()
|   455   std::string sarl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in sarl_fn()
|   478   std::string sarq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in sarq_fn()
|   501   std::string rorl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in rorl_fn()
|   524   std::string roll_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in roll_fn()
|   547   std::string rorq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in rorq_fn()
|   570   std::string rolq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in rolq_fn()
|   [all …]
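
The x86-64 assembler test hits are per-instruction callbacks (shll_fn, rorq_fn, and so on) that drive the assembler under test across a set of registers and return the expected assembly text for comparison. A hedged sketch of that callback shape with hypothetical types:

```cpp
// Sketch of a per-instruction test callback: drive the assembler under test over
// a register set and return the expected text. All names here are hypothetical.
#include <string>
#include <vector>

struct RegSketch { std::string name; };

struct TestAssemblerSketch {
  std::vector<std::string> emitted;
  void shll(const RegSketch& reg, int imm) {  // stand-in for the real shll() overloads
    emitted.push_back("shll $" + std::to_string(imm) + ", %" + reg.name);
  }
};

static std::string ShllFnSketch(TestAssemblerSketch* assembler,
                                const std::vector<RegSketch>& registers) {
  std::string expected;
  for (const RegSketch& reg : registers) {
    assembler->shll(reg, 1);                       // emit through the assembler under test
    expected += "shll $1, %" + reg.name + "\n";    // and build the reference string
  }
  return expected;  // later compared against the reference assembler's output
}
```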
|
| /art/compiler/linker/arm/ |
| D | relative_patcher_thumb2.cc |
|    84   arm::Thumb2Assembler assembler(&arena);  in CompileThunkCode() local
|
| /art/compiler/utils/x86/ |
| D | assembler_x86_test.cc |
|   280   std::string rorl_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) {  in rorl_fn()
|   303   std::string roll_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) {  in roll_fn()
|
| /art/compiler/linker/arm64/ |
| D | relative_patcher_arm64.cc |
|   252   arm64::Arm64Assembler assembler(&arena);  in CompileThunkCode() local
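
Both relative patchers above (Thumb2 and ARM64) build their branch thunks in CompileThunkCode() with a locally constructed, arena-backed assembler. A minimal sketch of the idea, with hypothetical types rather than the real thunk emission:

```cpp
// Sketch of CompileThunkCode(): build a small branch thunk with a local assembler
// and hand back its bytes. ThunkAssemblerSketch is a hypothetical stand-in.
#include <cstdint>
#include <vector>

struct ThunkAssemblerSketch {
  std::vector<uint8_t> buffer;
  void EmitBranchToEntrypoint(uint32_t entrypoint_offset) {
    // The real patchers emit an ISA-specific load of the entrypoint plus a branch;
    // this sketch only records the offset bytes.
    for (int i = 0; i < 4; ++i) {
      buffer.push_back(static_cast<uint8_t>((entrypoint_offset >> (8 * i)) & 0xff));
    }
  }
  void FinalizeCode() { /* would bind labels and resolve fixups */ }
};

static std::vector<uint8_t> CompileThunkCodeSketch(uint32_t entrypoint_offset) {
  ThunkAssemblerSketch assembler;  // local, arena-backed in the real patchers
  assembler.EmitBranchToEntrypoint(entrypoint_offset);
  assembler.FinalizeCode();
  return assembler.buffer;
}
```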
|
| /art/compiler/utils/arm/ |
| D | assembler_thumb2.h |
|   578   ArrayRef<const FixupId> Dependents(const Thumb2Assembler& assembler) const {  in Dependents()
|
| D | assembler_arm.cc |
|   578   static void EmitLoad(ArmAssembler* assembler, ManagedRegister m_dst,  in EmitLoad()
|
| D | assembler_thumb2.cc |
|    31   void Thumb2Assembler::Fixup::ForExpandableDependencies(Thumb2Assembler* assembler, Function fn) {  in ForExpandableDependencies()
|    54   void Thumb2Assembler::Fixup::PrepareDependents(Thumb2Assembler* assembler) {  in PrepareDependents()
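
The assembler_thumb2 hits concern fixup dependency tracking: when a 16-bit branch may have to expand to 32 bits, every fixup whose branch span crosses it depends on that expansion. A simplified, hypothetical sketch of collecting such dependents (the real Fixup bookkeeping in Thumb2Assembler is more involved):

```cpp
// Simplified, hypothetical sketch of fixup dependency collection for branch
// relaxation; the real Thumb2Assembler::Fixup bookkeeping is more involved.
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

struct FixupSketch {
  uint32_t location;   // where the fixed-up instruction sits in the buffer
  uint32_t target;     // where it branches to
  bool can_expand;     // 16-bit encoding that may have to grow to 32 bits
};

// A fixup depends on an expandable fixup when the expandable one lies inside its
// branch span: expanding it changes the distance the dependent must encode.
static std::vector<std::size_t> CollectDependents(const std::vector<FixupSketch>& fixups,
                                                  std::size_t expandable_index) {
  std::vector<std::size_t> dependents;
  const FixupSketch& expandable = fixups[expandable_index];
  if (!expandable.can_expand) {
    return dependents;
  }
  for (std::size_t i = 0; i != fixups.size(); ++i) {
    if (i == expandable_index) continue;
    uint32_t lo = std::min(fixups[i].location, fixups[i].target);
    uint32_t hi = std::max(fixups[i].location, fixups[i].target);
    if (lo <= expandable.location && expandable.location < hi) {
      dependents.push_back(i);
    }
  }
  return dependents;
}
```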
|
| /art/compiler/utils/ |
| D | assembler_thumb_test.cc |
|   177   void EmitAndCheck(arm::Thumb2Assembler* assembler, const char* testname,  in EmitAndCheck()
|   188   void EmitAndCheck(arm::Thumb2Assembler* assembler, const char* testname) {  in EmitAndCheck()
|   204   arm::Thumb2Assembler assembler;  member in art::arm::Thumb2AssemblerTest
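
The Thumb test harness's EmitAndCheck() finalizes the assembler's code buffer and compares it against the expected output registered for the named test. A hedged sketch of that emit-and-compare flow with hypothetical types:

```cpp
// Sketch of an EmitAndCheck-style harness: finalize the code buffer, then diff it
// against the expected bytes registered under the test's name. Hypothetical types.
#include <cassert>
#include <cstdint>
#include <map>
#include <string>
#include <vector>

struct HarnessAssemblerSketch {
  std::vector<uint8_t> code;
  void FinalizeCode() { /* would resolve fixups and bind remaining labels */ }
};

static void EmitAndCheckSketch(HarnessAssemblerSketch* assembler, const std::string& testname,
                               const std::map<std::string, std::vector<uint8_t>>& expected) {
  assembler->FinalizeCode();
  auto it = expected.find(testname);
  assert(it != expected.end() && "no expected output registered for this test");
  if (it != expected.end()) {
    assert(assembler->code == it->second && "emitted code differs from the expected output");
  }
}
```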
|