Occurrences of the identifier "assembler" under /art/compiler/, grouped by directory and file. Each entry gives the source line number, the matching line of code, and the enclosing function; "local" marks a local-variable definition and "member in …" a field.

/art/compiler/trampolines/

trampoline_compiler.cc
     61  ArmVIXLAssembler assembler(allocator);  in CreateTrampoline() local
     96  Arm64Assembler assembler(allocator);  in CreateTrampoline() local
    131  Riscv64Assembler assembler(allocator);  in CreateTrampoline() local
    162  X86Assembler assembler(allocator);  in CreateTrampoline() local
    183  x86_64::X86_64Assembler assembler(allocator);  in CreateTrampoline() local
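Each hit above is a stack-allocated, architecture-specific assembler created inside a per-ISA dispatch in CreateTrampoline(). The sketch below models only that shape; ArenaAllocator, Assembler, Arm64Assembler, X86_64Assembler, EmitJumpTo, and code() here are hypothetical stand-ins, not the real ART classes or methods.

    #include <cstdint>
    #include <vector>

    // Hypothetical stand-ins; the real types live under art/compiler/utils/<isa>/.
    struct ArenaAllocator {};

    class Assembler {
     public:
      explicit Assembler(ArenaAllocator* allocator) : allocator_(allocator) {}
      virtual ~Assembler() = default;
      virtual void EmitJumpTo(uintptr_t entry_point) = 0;
      const std::vector<uint8_t>& code() const { return code_; }

     protected:
      void EmitByte(uint8_t b) { code_.push_back(b); }
      ArenaAllocator* allocator_;

     private:
      std::vector<uint8_t> code_;
    };

    class Arm64Assembler final : public Assembler {
     public:
      using Assembler::Assembler;
      void EmitJumpTo(uintptr_t entry_point) override {
        // Placeholder encoding; a real trampoline loads the entry point and branches.
        for (int i = 0; i < 8; ++i) EmitByte(static_cast<uint8_t>(entry_point >> (8 * i)));
      }
    };

    class X86_64Assembler final : public Assembler {
     public:
      using Assembler::Assembler;
      void EmitJumpTo(uintptr_t entry_point) override {
        EmitByte(0xe9);  // Placeholder: opcode byte of a rel32 jump.
        for (int i = 0; i < 4; ++i) EmitByte(static_cast<uint8_t>(entry_point >> (8 * i)));
      }
    };

    enum class InstructionSet { kArm64, kX86_64 };

    // Mirrors the shape of CreateTrampoline(): one concrete assembler is
    // constructed on the stack for the requested ISA, the trampoline body is
    // emitted, and the finished bytes are handed back to the caller.
    std::vector<uint8_t> CreateTrampoline(InstructionSet isa,
                                          ArenaAllocator* allocator,
                                          uintptr_t entry_point) {
      switch (isa) {
        case InstructionSet::kArm64: {
          Arm64Assembler assembler(allocator);
          assembler.EmitJumpTo(entry_point);
          return assembler.code();
        }
        case InstructionSet::kX86_64: {
          X86_64Assembler assembler(allocator);
          assembler.EmitJumpTo(entry_point);
          return assembler.code();
        }
      }
      return {};
    }

The point of the pattern is that the trampoline compiler never holds an assembler beyond the switch arm: each arm constructs, emits, and extracts the buffer before returning.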
/art/compiler/optimizing/

intrinsics_riscv64.cc
     42  Riscv64Assembler* assembler = codegen->GetAssembler();  in EmitNativeCode() local
    176  Riscv64Assembler* assembler = GetAssembler();  in VisitDoubleDoubleToRawLongBits() local
    186  Riscv64Assembler* assembler = GetAssembler();  in VisitDoubleLongBitsToDouble() local
    196  Riscv64Assembler* assembler = GetAssembler();  in VisitFloatFloatToRawIntBits() local
    206  Riscv64Assembler* assembler = GetAssembler();  in VisitFloatIntBitsToFloat() local
    216  Riscv64Assembler* assembler = GetAssembler();  in VisitDoubleIsInfinite() local
    229  Riscv64Assembler* assembler = GetAssembler();  in VisitFloatIsInfinite() local
    254  Riscv64Assembler* assembler = GetAssembler();  in VisitMemoryPeekByte() local
    263  Riscv64Assembler* assembler = GetAssembler();  in VisitMemoryPeekIntNative() local
    272  Riscv64Assembler* assembler = GetAssembler();  in VisitMemoryPeekLongNative() local
    [all …]
intrinsics_arm_vixl.cc
     87  ArmVIXLAssembler* assembler = arm_codegen->GetAssembler();  in EmitNativeCode() local
    178  static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmVIXLAssembler* assembler) {  in MoveFPToInt()
    188  static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmVIXLAssembler* assembler) {  in MoveIntToFP()
    259  ArmVIXLAssembler* assembler = codegen->GetAssembler();  in GenNumberOfLeadingZeros() local
    304  ArmVIXLAssembler* assembler = codegen->GetAssembler();  in GenNumberOfTrailingZeros() local
    350  ArmVIXLAssembler* assembler = GetAssembler();  in VisitMathSqrt() local
    362  ArmVIXLAssembler* assembler = GetAssembler();  in VisitMathRint() local
    379  ArmVIXLAssembler* assembler = GetAssembler();  in VisitMathRoundFloat() local
    422  ArmVIXLAssembler* assembler = GetAssembler();  in VisitMemoryPeekByte() local
    432  ArmVIXLAssembler* assembler = GetAssembler();  in VisitMemoryPeekIntNative() local
    [all …]
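Several of these hits (MoveFPToInt at line 178, MoveIntToFP at line 188, GenNumberOfLeadingZeros at line 259) are static helpers that take the register-allocated locations together with the assembler and emit a short instruction sequence. Below is a sketch of that shape restricted to the 32-bit float-to-int bit move; SRegister, Register, LocationSummary, ArmVIXLAssembler, and Vmov are simplified stand-ins, not the real VIXL-backed types.

    // Hypothetical stand-ins for the VIXL register and location types.
    struct SRegister { int code; };  // 32-bit FP register.
    struct Register { int code; };   // Core register.

    struct LocationSummary {
      SRegister InAtFpuRegister(int i) const { return fp_inputs_[i]; }
      Register OutRegister() const { return out_; }
      SRegister fp_inputs_[1];
      Register out_;
    };

    class ArmVIXLAssembler {
     public:
      void Vmov(Register rt, SRegister sn) { /* emit VMOV rt, sn (placeholder) */ }
    };

    // In the spirit of MoveFPToInt(), 32-bit case only: the helper takes the
    // locations chosen by the register allocator plus the assembler, and emits
    // the raw bit move used by Float.floatToRawIntBits().
    static void MoveFPToInt32(LocationSummary* locations, ArmVIXLAssembler* assembler) {
      assembler->Vmov(locations->OutRegister(), locations->InAtFpuRegister(0));
    }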
jit_patches_arm64.h
     49  JitPatchesARM64(Arm64Assembler* assembler, ArenaAllocator* allocator) :  in JitPatchesARM64()
intrinsics_x86.cc
     74  static void GenArrayAddress(X86Assembler* assembler,  in GenArrayAddress()
     99  X86Assembler* assembler = x86_codegen->GetAssembler();  in EmitNativeCode() local
    171  static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {  in MoveFPToInt()
    186  static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {  in MoveIntToFP()
    253  X86Assembler* assembler) {  in GenReverseBytes()
    292  X86Assembler* assembler = GetAssembler();  in VisitLongReverseBytes() local
    338  static void GenSSE41FPToFPIntrinsic(HInvoke* invoke, X86Assembler* assembler, int round_mode) {  in GenSSE41FPToFPIntrinsic()
    399  X86Assembler* assembler = GetAssembler();  in VisitMathRoundFloat() local
    458  X86Assembler* assembler = codegen->GetAssembler();  in GenFPToFPCall() local
    495  static void GenLowestOneBit(X86Assembler* assembler,  in GenLowestOneBit()
    [all …]
intrinsics_x86_64.cc
     70  static void GenArrayAddress(X86_64Assembler* assembler,  in GenArrayAddress()
     96  X86_64Assembler* assembler = x86_64_codegen->GetAssembler();  in EmitNativeCode() local
    158  static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) {  in MoveFPToInt()
    164  static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86_64Assembler* assembler) {  in MoveIntToFP()
    232  X86_64Assembler* assembler = codegen->GetAssembler();  in GenIsInfinite() local
    320  static void GenSSE41FPToFPIntrinsic(HInvoke* invoke, X86_64Assembler* assembler, int round_mode) {  in GenSSE41FPToFPIntrinsic()
    381  X86_64Assembler* assembler = GetAssembler();  in VisitMathRoundFloat() local
    422  X86_64Assembler* assembler = GetAssembler();  in VisitMathRoundDouble() local
    674  static void EmitCmplJLess(X86_64Assembler* assembler,  in EmitCmplJLess()
    688  static void CheckSystemArrayCopyPosition(X86_64Assembler* assembler,  in CheckSystemArrayCopyPosition()
    [all …]
intrinsics_utils.h
     58  TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler());  in EmitNativeCode() local
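The single hit in intrinsics_utils.h is the shared intrinsic slow-path template recovering the concrete assembler type from the code generator. The sketch below shows only that pattern; Assembler, CodeGenerator, X86_64Assembler, int3(), EmitSlowPath, and this down_cast are simplified stand-ins rather than the real ART declarations.

    #include <cassert>
    #include <type_traits>

    // Hypothetical stand-ins for the real ART types.
    class Assembler {
     public:
      virtual ~Assembler() = default;
    };

    class X86_64Assembler final : public Assembler {
     public:
      void int3() { /* emit a breakpoint; placeholder body */ }
    };

    class CodeGenerator {
     public:
      explicit CodeGenerator(Assembler* assembler) : assembler_(assembler) {}
      Assembler* GetAssembler() const { return assembler_; }

     private:
      Assembler* assembler_;
    };

    // Simplified checked downcast in the spirit of art::down_cast.
    template <typename To, typename From>
    To down_cast(From* from) {
      static_assert(std::is_base_of<From, std::remove_pointer_t<To>>::value,
                    "down_cast only moves down the inheritance hierarchy");
      assert(from == nullptr || dynamic_cast<To>(from) != nullptr);
      return static_cast<To>(from);
    }

    // Mirrors the shape of the templated slow path's EmitNativeCode(): the shared
    // code only holds a base CodeGenerator*, so it downcasts GetAssembler() to the
    // architecture-specific assembler before emitting anything.
    template <typename TAssembler>
    void EmitSlowPath(CodeGenerator* codegen) {
      TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler());
      assembler->int3();  // Stand-in for the real save/call/restore sequence.
    }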
code_generator_arm_vixl.cc
    122  EmitAdrCode(ArmVIXLMacroAssembler* assembler, vixl32::Register rd, vixl32::Label* label)  in EmitAdrCode()
   6179  ArmVIXLAssembler* assembler = codegen->GetAssembler();  in CanEncode32BitConstantAsImmediate() local
   9959  arm::ArmVIXLAssembler assembler(GetGraph()->GetAllocator());  in EmitThunkCode() local
  10235  static void EmitGrayCheckAndFastPath(ArmVIXLAssembler& assembler,  in EmitGrayCheckAndFastPath()
  10262  static vixl32::Register LoadReadBarrierMarkIntrospectionEntrypoint(ArmVIXLAssembler& assembler) {  in LoadReadBarrierMarkIntrospectionEntrypoint()
  10274  void CodeGeneratorARMVIXL::CompileBakerReadBarrierThunk(ArmVIXLAssembler& assembler,  in CompileBakerReadBarrierThunk()
code_generator_arm64.cc
   5442  Arm64Assembler assembler(GetGraph()->GetAllocator());  in EmitThunkCode() local
   7226  static void EmitGrayCheckAndFastPath(arm64::Arm64Assembler& assembler,  in EmitGrayCheckAndFastPath()
   7265  static void LoadReadBarrierMarkIntrospectionEntrypoint(arm64::Arm64Assembler& assembler,  in LoadReadBarrierMarkIntrospectionEntrypoint()
   7274  void CodeGeneratorARM64::CompileBakerReadBarrierThunk(Arm64Assembler& assembler,  in CompileBakerReadBarrierThunk()
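EmitThunkCode() at line 5442 illustrates the other construction pattern seen in the code generators: a throwaway Arm64Assembler is built directly from the graph's arena allocator to assemble a standalone thunk (here the Baker read barrier thunks), and its bytes are then copied out. A rough sketch of that flow follows; ArenaAllocator, Arm64Assembler, Brk, FinalizeCode, CodeSize, and CopyInstructions are hypothetical stand-ins for the real types and methods.

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Hypothetical stand-ins; the real code uses the ART arena allocator and
    // the arm64 assembler from art/compiler/utils/arm64/.
    struct ArenaAllocator {};

    class Arm64Assembler {
     public:
      explicit Arm64Assembler(ArenaAllocator* allocator) : allocator_(allocator) {}
      void Brk(uint16_t imm) { Emit(0xd4200000u | (static_cast<uint32_t>(imm) << 5)); }
      void FinalizeCode() {}
      size_t CodeSize() const { return code_.size(); }
      void CopyInstructions(uint8_t* dst) const { std::copy(code_.begin(), code_.end(), dst); }

     private:
      void Emit(uint32_t insn) {
        for (int i = 0; i < 4; ++i) code_.push_back(static_cast<uint8_t>(insn >> (8 * i)));
      }
      ArenaAllocator* allocator_;
      std::vector<uint8_t> code_;
    };

    // Mirrors the shape of EmitThunkCode(): build a local assembler from the arena
    // allocator, emit the thunk body, finalize, and copy the bytes out.
    std::vector<uint8_t> EmitThunkCode(ArenaAllocator* allocator) {
      Arm64Assembler assembler(allocator);
      assembler.Brk(0);  // Placeholder body instead of the real read barrier fast path.
      assembler.FinalizeCode();
      std::vector<uint8_t> code(assembler.CodeSize());
      assembler.CopyInstructions(code.data());
      return code;
    }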
intrinsics_arm64.cc
   1144  Arm64Assembler* assembler = codegen->GetAssembler();  in EmitLoadExclusive() local
   1198  Arm64Assembler* assembler = codegen->GetAssembler();  in EmitStoreExclusive() local
   1261  Arm64Assembler* assembler = codegen->GetAssembler();  in GenerateCompareAndSet() local
   1358  Arm64Assembler* assembler = arm64_codegen->GetAssembler();  in EmitNativeCode() local
code_generator_riscv64.cc
    798  Riscv64Assembler* assembler = down_cast<CodeGeneratorRISCV64*>(codegen_)->GetAssembler();  in FpBinOp() local
    857  Riscv64Assembler* assembler = down_cast<CodeGeneratorRISCV64*>(codegen_)->GetAssembler();  in FpUnOp() local
code_generator_x86_64.cc
   8406  X86_64Assembler* assembler = codegen_->GetAssembler();  in CreateJumpTable() local
   8435  X86_64Assembler* assembler = GetAssembler();  in Finalize() local
code_generator_x86.cc
   9089  X86Assembler* assembler = codegen_->GetAssembler();  in CreateJumpTable() local
   9119  X86Assembler* assembler = GetAssembler();  in Finalize() local
/art/compiler/utils/x86_64/

assembler_x86_64_test.cc
    654  std::string shll_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shll_fn()
    675  std::string shlq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shlq_fn()
    696  std::string shrl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shrl_fn()
    716  std::string shrq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in shrq_fn()
    736  std::string sarl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in sarl_fn()
    756  std::string sarq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in sarq_fn()
    776  std::string rorl_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in rorl_fn()
    796  std::string roll_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in roll_fn()
    816  std::string rorq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in rorq_fn()
    836  std::string rolq_fn(AssemblerX86_64Test::Base* assembler_test, x86_64::X86_64Assembler* assembler) {  in rolq_fn()
    [all …]
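Each *_fn helper in this test shares one signature: it takes the test fixture and the assembler, drives the instruction under test across the registers, and returns the expected AT&T-syntax text that the harness diffs against the assembled output. The sketch below models only that shape; Register, X86_64Assembler, AssemblerTestBase, and RegName32 are simplified stand-ins, and the real helpers cover more operand combinations.

    #include <cstdint>
    #include <sstream>
    #include <string>
    #include <vector>

    // Hypothetical stand-ins for the x86-64 assembler and its test fixture.
    enum class Register { RAX, RCX, RDX, RBX };

    class X86_64Assembler {
     public:
      void shll(Register reg, uint8_t imm) { ops_.push_back({reg, imm}); }  // Encoding elided.

     private:
      struct Op { Register reg; uint8_t imm; };
      std::vector<Op> ops_;
    };

    struct AssemblerTestBase {
      std::vector<Register> GetRegisters() const {
        return {Register::RAX, Register::RCX, Register::RDX, Register::RBX};
      }
    };

    static const char* RegName32(Register reg) {
      switch (reg) {
        case Register::RAX: return "%eax";
        case Register::RCX: return "%ecx";
        case Register::RDX: return "%edx";
        case Register::RBX: return "%ebx";
      }
      return "";
    }

    // Mirrors the shape of shll_fn() and the other shift/rotate helpers: exercise
    // the assembler for each register and collect the expected disassembly text.
    std::string shll_fn(AssemblerTestBase* assembler_test, X86_64Assembler* assembler) {
      std::ostringstream expected;
      for (Register reg : assembler_test->GetRegisters()) {
        assembler->shll(reg, 1);
        expected << "shll $1, " << RegName32(reg) << "\n";
      }
      return expected.str();
    }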
jni_macro_assembler_x86_64.cc
    144  static void DecreaseFrameSizeImpl(size_t adjust, X86_64Assembler* assembler) {  in DecreaseFrameSizeImpl()
/art/compiler/utils/riscv64/

assembler_riscv64.h
   2744  ScopedExtensionsOverride(Riscv64Assembler* assembler, Riscv64ExtensionMask enabled_extensions)  in ScopedExtensionsOverride()
   2755  static Riscv64ExtensionMask GetEnabledExtensions(Riscv64Assembler* assembler) {  in GetEnabledExtensions()
   2767  explicit ScopedExtensionsRestriction(Riscv64Assembler* assembler)  in ScopedExtensionsRestriction()
   2774  explicit ScopedExtensionsInclusion(Riscv64Assembler* assembler)  in ScopedExtensionsInclusion()
   2786  explicit ScratchRegisterScope(Riscv64Assembler* assembler)  in ScratchRegisterScope()
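All of the hits in this header are RAII helpers constructed around a Riscv64Assembler*: ScopedExtensionsOverride (with its restriction and inclusion variants) swaps the set of enabled ISA extensions for the duration of a scope, and ScratchRegisterScope manages the scratch-register pool the same way. A minimal sketch of the override pattern, assuming a hypothetical stand-in assembler with an enabled_extensions() accessor rather than the real class:

    #include <cstdint>

    using Riscv64ExtensionMask = uint32_t;

    // Hypothetical stand-in for the real Riscv64Assembler.
    class Riscv64Assembler {
     public:
      explicit Riscv64Assembler(Riscv64ExtensionMask enabled) : enabled_extensions_(enabled) {}
      Riscv64ExtensionMask enabled_extensions() const { return enabled_extensions_; }
      void set_enabled_extensions(Riscv64ExtensionMask mask) { enabled_extensions_ = mask; }

     private:
      Riscv64ExtensionMask enabled_extensions_;
    };

    // Mirrors the shape of ScopedExtensionsOverride at line 2744: install a new
    // extension mask on construction and restore the previous one on destruction,
    // so a block of emitted code can assume (or forbid) specific extensions.
    class ScopedExtensionsOverride {
     public:
      ScopedExtensionsOverride(Riscv64Assembler* assembler, Riscv64ExtensionMask enabled_extensions)
          : assembler_(assembler), old_mask_(assembler->enabled_extensions()) {
        assembler_->set_enabled_extensions(enabled_extensions);
      }

      ~ScopedExtensionsOverride() { assembler_->set_enabled_extensions(old_mask_); }

      ScopedExtensionsOverride(const ScopedExtensionsOverride&) = delete;
      ScopedExtensionsOverride& operator=(const ScopedExtensionsOverride&) = delete;

     private:
      Riscv64Assembler* const assembler_;
      const Riscv64ExtensionMask old_mask_;
    };

Presumably ScopedExtensionsRestriction and ScopedExtensionsInclusion differ only in how they derive the new mask from the current one before handing it to a scope like this.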
assembler_riscv64.cc
   6381  void AddConstImpl(Riscv64Assembler* assembler,  in AddConstImpl()
/art/compiler/utils/

assembler_thumb_test.cc
     93  ArmVIXLJNIMacroAssembler assembler;  member in art::arm::ArmVIXLAssemblerTest
/art/compiler/utils/x86/

jni_macro_assembler_x86.cc
    115  static void DecreaseFrameSizeImpl(X86Assembler* assembler, size_t adjust) {  in DecreaseFrameSizeImpl()
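DecreaseFrameSizeImpl() is a static helper shared by the JNI macro assembler entry points; note that the argument order differs from the x86-64 variant listed earlier (adjust first there, assembler first here). The sketch below shows the general idea; Register, Immediate, X86Assembler, addl, and AdjustCfaOffset are hypothetical stand-ins, not the real x86 assembler API.

    #include <cstddef>
    #include <cstdint>

    // Hypothetical stand-ins for the real x86 assembler types.
    enum class Register { ESP };

    struct Immediate {
      explicit Immediate(int32_t v) : value(v) {}
      int32_t value;
    };

    class X86Assembler {
     public:
      void addl(Register reg, const Immediate& imm) { /* emit ADD reg, imm32 (placeholder) */ }
      void AdjustCfaOffset(int32_t delta) { /* keep the CFI unwind info in sync (placeholder) */ }
    };

    // Mirrors the shape of DecreaseFrameSizeImpl() at line 115: pop the frame by
    // bumping the stack pointer and record the change for the unwinder.
    static void DecreaseFrameSizeImpl(X86Assembler* assembler, size_t adjust) {
      assembler->addl(Register::ESP, Immediate(static_cast<int32_t>(adjust)));
      assembler->AdjustCfaOffset(-static_cast<int32_t>(adjust));
    }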
assembler_x86_test.cc
    495  std::string rorl_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) {  in rorl_fn()
    515  std::string roll_fn(AssemblerX86Test::Base* assembler_test, x86::X86Assembler* assembler) {  in roll_fn()