/art/compiler/trampolines/

trampoline_compiler.cc
     65  ArmVIXLAssembler assembler(allocator);  in CreateTrampoline() local
    103  Arm64Assembler assembler(allocator);  in CreateTrampoline() local
    142  MipsAssembler assembler(allocator);  in CreateTrampoline() local
    174  Mips64Assembler assembler(allocator);  in CreateTrampoline() local
    206  X86Assembler assembler(allocator);  in CreateTrampoline() local
    227  x86_64::X86_64Assembler assembler(allocator);  in CreateTrampoline() local
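Every hit above is the same construction: CreateTrampoline() builds an architecture-specific assembler over an allocator and emits into it. The sketch below mirrors that shape in self-contained form; Isa, FakeAllocator, and FakeAssembler are hypothetical stand-ins (the real code instantiates a different assembler class per architecture, as the matches show), not ART types.

```cpp
#include <cstdint>
#include <vector>

// Hypothetical stand-ins; the real code uses ArenaAllocator and one
// assembler class per ISA (ArmVIXLAssembler, Arm64Assembler, ...).
enum class Isa { kArm, kArm64, kX86, kX86_64 };
struct FakeAllocator {};

class FakeAssembler {
 public:
  explicit FakeAssembler(FakeAllocator* allocator) : allocator_(allocator) {}
  void EmitByte(uint8_t b) { code_.push_back(b); }
  std::vector<uint8_t> Release() { return std::move(code_); }

 private:
  [[maybe_unused]] FakeAllocator* allocator_;
  std::vector<uint8_t> code_;
};

// Same shape as the CreateTrampoline() matches above: build an assembler
// over the allocator, emit per-ISA code, return the finished buffer.
std::vector<uint8_t> CreateTrampolineSketch(Isa isa, FakeAllocator* allocator) {
  FakeAssembler assembler(allocator);
  switch (isa) {
    case Isa::kArm:    assembler.EmitByte(0x01); break;  // placeholder bytes only;
    case Isa::kArm64:  assembler.EmitByte(0x02); break;  // a real trampoline emits a
    case Isa::kX86:    assembler.EmitByte(0x03); break;  // jump to a runtime entrypoint
    case Isa::kX86_64: assembler.EmitByte(0x04); break;
  }
  return assembler.Release();
}

int main() {
  FakeAllocator allocator;
  return CreateTrampolineSketch(Isa::kArm64, &allocator).size() == 1 ? 0 : 1;
}
```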
/art/compiler/optimizing/

intrinsics_arm_vixl.cc
     89  ArmVIXLAssembler* assembler = down_cast<ArmVIXLAssembler*>(codegen->GetAssembler());  in EmitNativeCode() local
    124  static void GenSystemArrayCopyBaseAddress(ArmVIXLAssembler* assembler,  in GenSystemArrayCopyBaseAddress()
    147  static void GenSystemArrayCopyEndAddress(ArmVIXLAssembler* assembler,  in GenSystemArrayCopyEndAddress()
    178  ArmVIXLAssembler* assembler = arm_codegen->GetAssembler();  in EmitNativeCode() local
    273  static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmVIXLAssembler* assembler) {  in MoveFPToInt()
    283  static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmVIXLAssembler* assembler) {  in MoveIntToFP()
    345  ArmVIXLAssembler* assembler = codegen->GetAssembler();  in GenNumberOfLeadingZeros() local
    390  ArmVIXLAssembler* assembler = codegen->GetAssembler();  in GenNumberOfTrailingZeros() local
    436  ArmVIXLAssembler* assembler = GetAssembler();  in VisitMathSqrt() local
    448  ArmVIXLAssembler* assembler = GetAssembler();  in VisitMathRint() local
    [all …]

intrinsics_mips64.cc
    155  static void MoveFPToInt(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {  in MoveFPToInt()
    191  static void MoveIntToFP(LocationSummary* locations, bool is64bit, Mips64Assembler* assembler) {  in MoveIntToFP()
    229  Mips64Assembler* assembler) {  in GenReverseBytes()
    281  Mips64Assembler* assembler) {  in GenNumberOfLeadingZeroes()
    312  Mips64Assembler* assembler) {  in GenNumberOfTrailingZeroes()
    349  Mips64Assembler* assembler) {  in GenReverse()
    394  Mips64Assembler* assembler) {  in GenBitCount()
    496  Mips64Assembler* assembler = GetAssembler();  in VisitMathSqrt() local
    519  Mips64Assembler* assembler = GetAssembler();  in VisitMathRint() local
    545  Mips64Assembler* assembler) {  in GenRoundingMode()
    [all …]

intrinsics_utils.h  (see the GetAssembler() sketch after this directory's listing)
     51  Assembler* assembler = codegen->GetAssembler();  in EmitNativeCode() local

intrinsics_mips.cc
    166  static void MoveFPToInt(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {  in MoveFPToInt()
    207  static void MoveIntToFP(LocationSummary* locations, bool is64bit, MipsAssembler* assembler) {  in MoveIntToFP()
    255  MipsAssembler* assembler) {  in GenReverse()
    449  MipsAssembler* assembler) {  in GenNumberOfLeadingZeroes()
    497  MipsAssembler* assembler) {  in GenNumberOfTrailingZeroes()
    620  MipsAssembler* assembler) {  in GenBitCount()
    774  MipsAssembler* assembler = GetAssembler();  in VisitMathSqrt() local
    787  MipsAssembler* assembler = GetAssembler();  in VisitMemoryPeekByte() local
    800  MipsAssembler* assembler = GetAssembler();  in VisitMemoryPeekShortNative() local
    832  MipsAssembler* assembler = GetAssembler();  in VisitMemoryPeekIntNative() local
    [all …]

intrinsics_x86.cc
    194  static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {  in MoveFPToInt()
    209  static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {  in MoveIntToFP()
    276  X86Assembler* assembler) {  in GenReverseBytes()
    315  X86Assembler* assembler = GetAssembler();  in VisitLongReverseBytes() local
    386  X86Assembler* assembler,  in GenSSE41FPToFPIntrinsic()
    463  X86Assembler* assembler = GetAssembler();  in VisitMathRoundFloat() local
    522  X86Assembler* assembler = codegen->GetAssembler();  in GenFPToFPCall() local
    559  static void GenLowestOneBit(X86Assembler* assembler,  in GenLowestOneBit()
    856  static void CheckPosition(X86Assembler* assembler,  in CheckPosition()
    921  X86Assembler* assembler = GetAssembler();  in VisitSystemArrayCopyChar() local
    [all …]

intrinsics_x86_64.cc
    145  static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) {  in MoveFPToInt()
    151  static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) {  in MoveIntToFP()
    194  X86_64Assembler* assembler) {  in GenReverseBytes()
    294  X86_64Assembler* assembler,  in GenSSE41FPToFPIntrinsic()
    370  X86_64Assembler* assembler = GetAssembler();  in VisitMathRoundFloat() local
    414  X86_64Assembler* assembler = GetAssembler();  in VisitMathRoundDouble() local
    664  static void CheckPosition(X86_64Assembler* assembler,  in CheckPosition()
    729  X86_64Assembler* assembler = GetAssembler();  in VisitSystemArrayCopyChar() local
    823  static void GenSystemArrayCopyAddresses(X86_64Assembler* assembler,  in GenSystemArrayCopyAddresses()
    866  X86_64Assembler* assembler = GetAssembler();  in VisitSystemArrayCopy() local
    [all …]
code_generator_arm_vixl.cc
    112  EmitAdrCode(ArmVIXLMacroAssembler* assembler, vixl32::Register rd, vixl32::Label* label)  in EmitAdrCode()
   5531  ArmVIXLAssembler* assembler = codegen->GetAssembler();  in CanEncode32BitConstantAsImmediate() local
   9031  arm::ArmVIXLAssembler assembler(GetGraph()->GetAllocator());  in EmitThunkCode() local
   9308  static void EmitGrayCheckAndFastPath(ArmVIXLAssembler& assembler,  in EmitGrayCheckAndFastPath()
   9335  static vixl32::Register LoadReadBarrierMarkIntrospectionEntrypoint(ArmVIXLAssembler& assembler) {  in LoadReadBarrierMarkIntrospectionEntrypoint()
   9347  void CodeGeneratorARMVIXL::CompileBakerReadBarrierThunk(ArmVIXLAssembler& assembler,  in CompileBakerReadBarrierThunk()

code_generator_arm64.cc
   4468  Arm64Assembler assembler(GetGraph()->GetAllocator());  in EmitThunkCode() local
   6174  static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler,  in EmitGrayCheckAndFastPath()
   6206  static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler,  in LoadReadBarrierMarkIntrospectionEntrypoint()
   6215  void CodeGeneratorARM64::CompileBakerReadBarrierThunk(Arm64Assembler& assembler,  in CompileBakerReadBarrierThunk()

intrinsics_arm64.cc
   1013  Arm64Assembler* assembler = arm64_codegen->GetAssembler();  in EmitNativeCode() local
   1069  Arm64Assembler* assembler = codegen->GetAssembler();  in GenCas() local

code_generator_x86_64.cc
   7511  X86_64Assembler* assembler = codegen_->GetAssembler();  in CreateJumpTable() local
   7540  X86_64Assembler* assembler = GetAssembler();  in Finalize() local

code_generator_x86.cc
   8208  X86Assembler* assembler = codegen_->GetAssembler();  in CreateJumpTable() local
   8238  X86Assembler* assembler = GetAssembler();  in Finalize() local
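A pattern that recurs through this directory: the architecture-neutral slow-path helper in intrinsics_utils.h keeps a plain Assembler* obtained from codegen->GetAssembler() (line 51 above), while the per-architecture intrinsics files narrow the same pointer to their own assembler type, e.g. the down_cast at intrinsics_arm_vixl.cc line 89. The sketch below is a minimal, self-contained illustration of that split; every class in it is a hypothetical stand-in, and plain static_cast stands in for ART's down_cast<>.

```cpp
#include <cstdio>

// Hypothetical stand-ins for Assembler, an ISA-specific assembler,
// and the code generator that owns it.
class Assembler {
 public:
  virtual ~Assembler() = default;
};

class ArmLikeAssembler : public Assembler {
 public:
  void Nop() { std::puts("nop"); }  // pretend to emit an instruction
};

class CodeGenerator {
 public:
  explicit CodeGenerator(Assembler* assembler) : assembler_(assembler) {}
  Assembler* GetAssembler() const { return assembler_; }

 private:
  Assembler* assembler_;
};

class IntrinsicSlowPath {
 public:
  void EmitNativeCode(CodeGenerator* codegen) {
    // Architecture-neutral code keeps the base pointer ...
    Assembler* assembler = codegen->GetAssembler();
    // ... and the per-ISA intrinsic narrows it to emit real instructions
    // (the ARM file above uses down_cast<ArmVIXLAssembler*>(...) for this).
    auto* arm_assembler = static_cast<ArmLikeAssembler*>(assembler);
    arm_assembler->Nop();
  }
};

int main() {
  ArmLikeAssembler assembler;
  CodeGenerator codegen(&assembler);
  IntrinsicSlowPath slow_path;
  slow_path.EmitNativeCode(&codegen);
  return 0;
}
```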
/art/compiler/utils/

assembler_thumb_test.cc
    175  ArmVIXLJNIMacroAssembler assembler;  member in art::arm::ArmVIXLAssemblerTest
    180  void EmitAndCheck(ArmVIXLJNIMacroAssembler* assembler, const char* testname,  in EmitAndCheck()
    191  void EmitAndCheck(ArmVIXLJNIMacroAssembler* assembler, const char* testname) {  in EmitAndCheck()
/art/compiler/utils/x86_64/

assembler_x86_64_test.cc
    634  std::string shll_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shll_fn()
    655  std::string shlq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shlq_fn()
    676  std::string shrl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shrl_fn()
    696  std::string shrq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shrq_fn()
    716  std::string sarl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in sarl_fn()
    736  std::string sarq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in sarq_fn()
    756  std::string rorl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in rorl_fn()
    776  std::string roll_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in roll_fn()
    796  std::string rorq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in rorq_fn()
    816  std::string rolq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in rolq_fn()
    [all …]

jni_macro_assembler_x86_64.cc
    144  static void DecreaseFrameSizeImpl(size_t adjust, X86_64Assembler* assembler) {  in DecreaseFrameSizeImpl()
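The shll_fn/shlq_fn/.../rolq_fn helpers listed above for assembler_x86_64_test.cc (and the rorl_fn/roll_fn pair in assembler_x86_test.cc below) all share one signature: they take the test fixture and the assembler and return a std::string. Presumably each one drives the assembler once per register and accumulates the disassembly text the harness should see; the sketch below shows that shape with a hypothetical FakeX86_64Assembler and an explicit register list instead of the fixture.

```cpp
#include <string>
#include <vector>

// Hypothetical stand-ins for the x86-64 assembler and its registers.
struct FakeRegister { std::string name; };

class FakeX86_64Assembler {
 public:
  void shll(const FakeRegister& reg, int imm) {  // pretend to encode "shll $imm, %reg"
    (void)reg;
    (void)imm;
  }
};

// Shape of the *_fn helpers above: emit one instruction per register and
// return the text the disassembler is expected to print for them.
std::string shll_fn_sketch(FakeX86_64Assembler* assembler,
                           const std::vector<FakeRegister>& registers) {
  std::string expected;
  for (const FakeRegister& reg : registers) {
    assembler->shll(reg, 1);
    expected += "shll $1, %" + reg.name + "\n";
  }
  return expected;
}

int main() {
  FakeX86_64Assembler assembler;
  std::string expected = shll_fn_sketch(&assembler, {{"eax"}, {"ecx"}});
  return expected.empty() ? 1 : 0;
}
```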
/art/compiler/utils/x86/

assembler_x86_test.cc
    389  std::string rorl_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) {  in rorl_fn()
    409  std::string roll_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) {  in roll_fn()

jni_macro_assembler_x86.cc
    113  static void DecreaseFrameSizeImpl(X86Assembler* assembler, size_t adjust) {  in DecreaseFrameSizeImpl()
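Both JNI macro assemblers route frame teardown through a static DecreaseFrameSizeImpl() helper that receives the raw assembler and the adjustment (note the two files order the parameters differently). Conceptually the operation is just "add the adjustment back to the stack pointer", plus CFI bookkeeping in the real helpers; the sketch below shows only that core idea with a hypothetical stand-in assembler.

```cpp
#include <cstddef>
#include <cstdio>

// Hypothetical stand-in; the real helpers take X86Assembler / X86_64Assembler.
class FakeX86_64Assembler {
 public:
  void AddToStackPointer(size_t imm) { std::printf("addq $%zu, %%rsp\n", imm); }
};

// Core idea of DecreaseFrameSizeImpl(): shrinking the frame means moving the
// stack pointer back up by `adjust` bytes (the real code also updates CFI).
static void DecreaseFrameSizeSketch(size_t adjust, FakeX86_64Assembler* assembler) {
  assembler->AddToStackPointer(adjust);
}

int main() {
  FakeX86_64Assembler assembler;
  DecreaseFrameSizeSketch(16, &assembler);  // e.g. release a 16-byte spill area
  return 0;
}
```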