/art/compiler/utils/x86/

assembler_x86_test.cc
    310  GetAssembler()->LoadLongConstant(x86::XMM0, 51);  in TEST_F()
    411  GetAssembler()->repne_scasb();  in TEST_F()
    417  GetAssembler()->repne_scasw();  in TEST_F()
    423  GetAssembler()->repe_cmpsb();  in TEST_F()
    429  GetAssembler()->repe_cmpsw();  in TEST_F()
    435  GetAssembler()->repe_cmpsl();  in TEST_F()
    441  GetAssembler()->rep_movsb();  in TEST_F()
    447  GetAssembler()->rep_movsw();  in TEST_F()
    453  GetAssembler()->rep_movsl();  in TEST_F()
    569  GetAssembler()->cmovl(x86::kEqual, x86::Register(x86::EAX), x86::Address(  in TEST_F()
    [all …]

/art/compiler/utils/x86_64/

assembler_x86_64_test.cc
    1151  GetAssembler()->cvtsi2ss(x86_64::XmmRegister(x86_64::XMM0),  in TEST_F()
    1154  GetAssembler()->cvtsi2ss(x86_64::XmmRegister(x86_64::XMM0),  in TEST_F()
    1163  GetAssembler()->cvtsi2sd(x86_64::XmmRegister(x86_64::XMM0),  in TEST_F()
    1166  GetAssembler()->cvtsi2sd(x86_64::XmmRegister(x86_64::XMM0),  in TEST_F()
    1231  GetAssembler()->repne_scasb();  in TEST_F()
    1237  GetAssembler()->repne_scasw();  in TEST_F()
    1243  GetAssembler()->rep_movsb();  in TEST_F()
    1249  GetAssembler()->rep_movsw();  in TEST_F()
    1255  GetAssembler()->rep_movsl();  in TEST_F()
    2088  GetAssembler()->psllw(x86_64::XmmRegister(x86_64::XMM0), x86_64::Immediate(1));  in TEST_F()
    [all …]

/art/compiler/optimizing/

optimizing_cfi_test.cc
    58   code_gen_->GetAssembler()->cfi().SetEnabled(true);  in SetUpFrame()
    98   Assembler* opt_asm = code_gen_->GetAssembler();  in Check()
    174  ->GetAssembler())->GetVIXLAssembler()->  in TEST_ISA()

intrinsics_arm_vixl.cc
    65   ArmVIXLAssembler* IntrinsicCodeGeneratorARMVIXL::GetAssembler() {  in GetAssembler() function in art::arm::IntrinsicCodeGeneratorARMVIXL
    66   return codegen_->GetAssembler();  in GetAssembler()
    87   ArmVIXLAssembler* assembler = arm_codegen->GetAssembler();  in EmitNativeCode()
    152  assembler_(codegen->GetAssembler()),  in IntrinsicLocationsBuilderARMVIXL()
    206  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());  in VisitDoubleDoubleToRawLongBits()
    209  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());  in VisitDoubleLongBitsToDouble()
    220  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());  in VisitFloatFloatToRawIntBits()
    223  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());  in VisitFloatIntBitsToFloat()
    259  ArmVIXLAssembler* assembler = codegen->GetAssembler();  in GenNumberOfLeadingZeros()
    304  ArmVIXLAssembler* assembler = codegen->GetAssembler();  in GenNumberOfTrailingZeros()
    [all …]

jit_patches_arm64.h
    88  Arm64Assembler* GetAssembler() const { return assembler_; }  in GetAssembler() function
    89  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler()

code_generator_arm_vixl.cc
    320   arm_codegen->GetAssembler()->StoreRegisterList(core_spills, orig_offset);  in SaveLiveRegisters()
    354   arm_codegen->GetAssembler()->LoadRegisterList(core_spills, orig_offset);  in RestoreLiveRegisters()
    2017  #define __ reinterpret_cast<ArmVIXLAssembler*>(GetAssembler())->GetVIXLAssembler()->  // NOLINT
    2028  CompileBakerReadBarrierThunk(*GetAssembler(), encoded_data, /* debug_name= */ nullptr);  in Finalize()
    2031  GetAssembler()->FinalizeCode();  in Finalize()
    2156  assembler_(codegen->GetAssembler()),  in InstructionCodeGeneratorARMVIXL()
    2204  GetAssembler()->LoadFromOffset(  in GenerateMethodEntryExitHook()
    2290  GetAssembler()->cfi().AdjustCFAOffset(kArmWordSize);  in MaybeIncrementHotness()
    2291  GetAssembler()->LoadFromOffset(kLoadWord, kMethodRegister, sp, kArmWordSize);  in MaybeIncrementHotness()
    2303  GetAssembler()->cfi().AdjustCFAOffset(-static_cast<int>(kArmWordSize));  in MaybeIncrementHotness()
    [all …]

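The hit at file line 2017 above is the local "__" shorthand that the ARM code generator defines around GetAssembler() (the RISC-V generator and the assembler tests below do the same), so emission code reads like an assembly listing. A minimal self-contained sketch of that idiom, with stand-in types rather than ART's real classes:

    // Sketch only: stand-in Assembler/CodeGenerator, not ART's classes.
    #include <cstddef>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    class Assembler {                        // stand-in for e.g. ArmVIXLAssembler
     public:
      void add(int, int, int) { bytes_.push_back(0x01); }  // pretend encoding
      void ret()              { bytes_.push_back(0x02); }
      std::size_t CodeSize() const { return bytes_.size(); }
     private:
      std::vector<uint8_t> bytes_;
    };

    class CodeGenerator {                    // stand-in for a per-ISA generator
     public:
      Assembler* GetAssembler() { return &assembler_; }

      // ART's real macros also cast to the concrete assembler type first.
      #define __ GetAssembler()->            // NOLINT
      void EmitAdd() {
        __ add(0, 1, 2);
        __ ret();
      }
      #undef __

     private:
      Assembler assembler_;
    };

    int main() {
      CodeGenerator gen;
      gen.EmitAdd();
      std::cout << gen.GetAssembler()->CodeSize() << " bytes emitted\n";
    }
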
intrinsics_x86_64.cc
    49   X86_64Assembler* IntrinsicCodeGeneratorX86_64::GetAssembler() {  in GetAssembler() function in art::x86_64::IntrinsicCodeGeneratorX86_64
    50   return down_cast<X86_64Assembler*>(codegen_->GetAssembler());  in GetAssembler()
    96   X86_64Assembler* assembler = x86_64_codegen->GetAssembler();  in EmitNativeCode()
    178  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());  in VisitDoubleDoubleToRawLongBits()
    181  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());  in VisitDoubleLongBitsToDouble()
    192  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());  in VisitFloatFloatToRawIntBits()
    195  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());  in VisitFloatIntBitsToFloat()
    232  X86_64Assembler* assembler = codegen->GetAssembler();  in GenIsInfinite()
    306  GetAssembler()->sqrtsd(out, in);  in VisitMathSqrt()
    333  GenSSE41FPToFPIntrinsic(invoke, GetAssembler(), 2);  in VisitMathCeil()
    [all …]

intrinsics_riscv64.cc
    42   Riscv64Assembler* assembler = codegen->GetAssembler();  in EmitNativeCode()
    101  Riscv64Assembler* IntrinsicCodeGeneratorRISCV64::GetAssembler() {  in GetAssembler() function in art::riscv64::IntrinsicCodeGeneratorRISCV64
    102  return codegen_->GetAssembler();  in GetAssembler()
    176  Riscv64Assembler* assembler = GetAssembler();  in VisitDoubleDoubleToRawLongBits()
    186  Riscv64Assembler* assembler = GetAssembler();  in VisitDoubleLongBitsToDouble()
    196  Riscv64Assembler* assembler = GetAssembler();  in VisitFloatFloatToRawIntBits()
    206  Riscv64Assembler* assembler = GetAssembler();  in VisitFloatIntBitsToFloat()
    216  Riscv64Assembler* assembler = GetAssembler();  in VisitDoubleIsInfinite()
    229  Riscv64Assembler* assembler = GetAssembler();  in VisitFloatIsInfinite()
    254  Riscv64Assembler* assembler = GetAssembler();  in VisitMemoryPeekByte()
    [all …]

intrinsics_x86.cc
    53   X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {  in GetAssembler() function in art::x86::IntrinsicCodeGeneratorX86
    54   return down_cast<X86Assembler*>(codegen_->GetAssembler());  in GetAssembler()
    99   X86Assembler* assembler = x86_codegen->GetAssembler();  in EmitNativeCode()
    210  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());  in VisitDoubleDoubleToRawLongBits()
    213  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ true, GetAssembler());  in VisitDoubleLongBitsToDouble()
    224  MoveFPToInt(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());  in VisitFloatFloatToRawIntBits()
    227  MoveIntToFP(invoke->GetLocations(), /* is64bit= */ false, GetAssembler());  in VisitFloatIntBitsToFloat()
    276  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt32, GetAssembler());  in VisitIntegerReverseBytes()
    292  X86Assembler* assembler = GetAssembler();  in VisitLongReverseBytes()
    305  GenReverseBytes(invoke->GetLocations(), DataType::Type::kInt16, GetAssembler());  in VisitShortReverseBytes()
    [all …]

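The first two hits in each of the x86/x86_64 intrinsics files show the same accessor shape: the shared code generator hands out a base-class Assembler*, and the per-ISA intrinsic generator narrows it to the concrete assembler type with ART's down_cast<> helper. A hedged sketch of that pattern with stand-in types (plain static_cast standing in for down_cast<>):

    #include <iostream>

    class Assembler { public: virtual ~Assembler() = default; };

    class X86LikeAssembler : public Assembler {     // stand-in for X86Assembler
     public:
      void sqrtsd() { std::cout << "sqrtsd\n"; }
    };

    class CodeGenerator {                           // stand-in for CodeGeneratorX86
     public:
      Assembler* GetAssembler() { return &assembler_; }
     private:
      X86LikeAssembler assembler_;
    };

    class IntrinsicCodeGenerator {                  // stand-in for IntrinsicCodeGeneratorX86
     public:
      explicit IntrinsicCodeGenerator(CodeGenerator* codegen) : codegen_(codegen) {}
      X86LikeAssembler* GetAssembler() {
        // The dynamic type is fixed by the target ISA, so the downcast is safe.
        return static_cast<X86LikeAssembler*>(codegen_->GetAssembler());
      }
      void VisitMathSqrt() { GetAssembler()->sqrtsd(); }
     private:
      CodeGenerator* codegen_;
    };

    int main() {
      CodeGenerator codegen;
      IntrinsicCodeGenerator intrinsics(&codegen);
      intrinsics.VisitMathSqrt();
    }
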
intrinsics_riscv64.h
    72  Riscv64Assembler* GetAssembler();

intrinsics_arm_vixl.h
    70  ArmVIXLAssembler* GetAssembler();

intrinsics_x86_64.h
    70  X86_64Assembler* GetAssembler();

intrinsics_x86.h
    70  X86Assembler* GetAssembler();

code_generator_arm64.h
    333   Arm64Assembler* GetAssembler() const { return assembler_; }  in GetAssembler() function
    334   vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler()
    599   Arm64Assembler* GetAssembler() const;
    601   return GetAssembler()->GetVIXLAssembler();  in GetVIXLAssembler()
    659   Arm64Assembler* GetAssembler() override { return &assembler_; }  in GetAssembler() function
    660   const Arm64Assembler& GetAssembler() const override { return assembler_; }  in GetAssembler() function
    661   vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler()
    1204  inline Arm64Assembler* ParallelMoveResolverARM64::GetAssembler() const {  in GetAssembler() function
    1205  return codegen_->GetAssembler();  in GetAssembler()

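Lines 333-334 and 659-661 above (and jit_patches_arm64.h earlier) show the two-level accessor used on ARM64: GetAssembler() returns the ART-side Arm64Assembler, and GetVIXLAssembler() simply forwards through it to the wrapped VIXL MacroAssembler. A minimal sketch of that forwarding, with stand-in types rather than the real ARM64 backend:

    #include <iostream>

    class MacroAssembler {                  // stand-in for vixl::aarch64::MacroAssembler
     public:
      void Ret() { std::cout << "ret\n"; }
    };

    class Arm64LikeAssembler {              // stand-in for art::arm64::Arm64Assembler
     public:
      MacroAssembler* GetVIXLAssembler() { return &masm_; }
     private:
      MacroAssembler masm_;
    };

    class CodeGenerator {                   // stand-in for CodeGeneratorARM64
     public:
      Arm64LikeAssembler* GetAssembler() { return &assembler_; }
      // Convenience accessor: callers needing raw VIXL take one extra hop.
      MacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }
     private:
      Arm64LikeAssembler assembler_;
    };

    int main() {
      CodeGenerator gen;
      gen.GetVIXLAssembler()->Ret();
    }
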
code_generator_riscv64.cc
    259   #define __ down_cast<CodeGeneratorRISCV64*>(codegen)->GetAssembler()->  // NOLINT
    278   riscv64::ScratchRegisterScope srs(riscv64_codegen->GetAssembler());  in EmitNativeCode()
    791   #define __ down_cast<Riscv64Assembler*>(GetAssembler())->  // NOLINT
    798   Riscv64Assembler* assembler = down_cast<CodeGeneratorRISCV64*>(codegen_)->GetAssembler();  in FpBinOp()
    857   Riscv64Assembler* assembler = down_cast<CodeGeneratorRISCV64*>(codegen_)->GetAssembler();  in FpUnOp()
    934   riscv64::ScratchRegisterScope srs(GetAssembler());  in Store()
    985   ScratchRegisterScope srs(GetAssembler());  in StoreSeqCst()
    1054  Riscv64Assembler* ParallelMoveResolverRISCV64::GetAssembler() const {  in GetAssembler() function in art::riscv64::ParallelMoveResolverRISCV64
    1055  return codegen_->GetAssembler();  in GetAssembler()
    1094  riscv64::ScratchRegisterScope srs(GetAssembler());  in Exchange()
    [all …]

code_generator.cc
    259   start_offset_ = codegen_.GetAssembler().CodeSize();  in DisassemblyScope()
    267   instruction_, start_offset_, codegen_.GetAssembler().CodeSize());  in ~DisassemblyScope()
    285   code_start = GetAssembler()->CodeSize();  in GenerateSlowPaths()
    291   disasm_info_->AddSlowPathInterval(slow_path, code_start, GetAssembler()->CodeSize());  in GenerateSlowPaths()
    321   size_t frame_start = GetAssembler()->CodeSize();  in Compile()
    323   DCHECK_EQ(GetAssembler()->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size_));  in Compile()
    325   disasm_info_->SetFrameEntryInterval(frame_start, GetAssembler()->CodeSize());  in Compile()
    370   GetStackMapStream()->EndMethod(GetAssembler()->CodeSize());  in Compile()
    374   GetAssembler()->FinalizeCode();  in Finalize()
    1134  RecordPcInfo(instruction, dex_pc, GetAssembler()->CodePosition(), slow_path, native_debug_info);  in RecordPcInfo()
    [all …]

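The code_generator.cc hits show a common bookkeeping pattern: byte offsets into the code buffer are sampled via GetAssembler()->CodeSize() before and after emitting a region (frame entry, slow paths, a single instruction), and the pair of offsets is recorded for disassembly or stack-map purposes. A self-contained sketch of that pattern, with a stand-in scope object rather than ART's DisassemblyScope:

    #include <cstddef>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    class Assembler {
     public:
      void Emit(uint8_t b) { buffer_.push_back(b); }
      std::size_t CodeSize() const { return buffer_.size(); }
     private:
      std::vector<uint8_t> buffer_;
    };

    class OffsetScope {                     // stand-in for DisassemblyScope
     public:
      OffsetScope(const char* what, const Assembler& assembler)
          : what_(what), assembler_(assembler), start_(assembler.CodeSize()) {}
      ~OffsetScope() {
        // Report the [start, end) byte interval covered while the scope lived.
        std::cout << what_ << ": bytes [" << start_ << ", "
                  << assembler_.CodeSize() << ")\n";
      }
     private:
      const char* what_;
      const Assembler& assembler_;
      std::size_t start_;
    };

    int main() {
      Assembler assembler;
      {
        OffsetScope scope("frame entry", assembler);
        assembler.Emit(0x55);               // pretend prologue bytes
        assembler.Emit(0x90);
      }
      {
        OffsetScope scope("slow paths", assembler);
        assembler.Emit(0xCC);               // pretend slow-path code
      }
    }
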
optimizing_compiler.cc
    744   ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data()),  in Emit()
    853   codegen->GetAssembler()->cfi().SetEnabled(compiler_options.GenerateAnyDebugInfo());  in TryCompile()
    1005  codegen->GetAssembler()->cfi().SetEnabled(compiler_options.GenerateAnyDebugInfo());  in TryCompileIntrinsic()
    1410  codegen->GetAssembler()->CodeSize(),  in JitCompile()
    1423  codegen->EmitJitRoots(const_cast<uint8_t*>(codegen->GetAssembler()->CodeBufferBaseAddress()),  in JitCompile()
    1449  info.code_size = codegen->GetAssembler()->CodeSize(),  in JitCompile()
    1452  info.cfi = ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data());  in JitCompile()
    1481  jit_logger->WriteLog(code, codegen->GetAssembler()->CodeSize(), method);  in JitCompile()

code_generator_x86_64.h
    201  X86_64Assembler* GetAssembler() const;
    273  X86_64Assembler* GetAssembler() const { return assembler_; }  in GetAssembler() function
    445  X86_64Assembler* GetAssembler() override {  in GetAssembler() function
    449  const X86_64Assembler& GetAssembler() const override {  in GetAssembler() function

code_generator_arm_vixl.h
    352  ArmVIXLAssembler* GetAssembler() const;
    422  ArmVIXLAssembler* GetAssembler() const { return assembler_; }  in GetAssembler() function
    423  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler()
    571  ArmVIXLAssembler* GetAssembler() override { return &assembler_; }  in GetAssembler() function
    573  const ArmVIXLAssembler& GetAssembler() const override { return assembler_; }  in GetAssembler() function
    575  ArmVIXLMacroAssembler* GetVIXLAssembler() { return GetAssembler()->GetVIXLAssembler(); }  in GetVIXLAssembler()

code_generator_x86.h
    208  X86Assembler* GetAssembler() const;
    277  X86Assembler* GetAssembler() const { return assembler_; }  in GetAssembler() function
    467  X86Assembler* GetAssembler() override {  in GetAssembler() function
    471  const X86Assembler& GetAssembler() const override {  in GetAssembler() function

code_generator_riscv64.h
    201  Riscv64Assembler* GetAssembler() const;
    284  Riscv64Assembler* GetAssembler() const { return assembler_; }  in GetAssembler() function
    475  Riscv64Assembler* GetAssembler() override { return &assembler_; }  in GetAssembler() function
    476  const Riscv64Assembler& GetAssembler() const override { return assembler_; }  in GetAssembler() function

code_generator.h
    223  virtual Assembler* GetAssembler() = 0;
    224  virtual const Assembler& GetAssembler() const = 0;
    733  return ArrayRef<const uint8_t>(GetAssembler().CodeBufferBaseAddress(),  in GetCode()
    734  GetAssembler().CodeSize());  in GetCode()

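code_generator.h declares GetAssembler() as a pure-virtual pair (mutable pointer and const reference), and the per-ISA headers listed above override it with covariant concrete types, so backend code gets e.g. a Riscv64Assembler* while ISA-agnostic code such as GetCode() works against the base Assembler interface. A compilable sketch of that arrangement with stand-in types:

    #include <cstddef>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    class Assembler {
     public:
      virtual ~Assembler() = default;
      void Emit(uint8_t b) { buffer_.push_back(b); }
      const uint8_t* CodeBufferBaseAddress() const { return buffer_.data(); }
      std::size_t CodeSize() const { return buffer_.size(); }
     private:
      std::vector<uint8_t> buffer_;
    };

    class Riscv64LikeAssembler : public Assembler {};   // stand-in concrete assembler

    class CodeGenerator {                               // shared, ISA-agnostic side
     public:
      virtual ~CodeGenerator() = default;
      virtual Assembler* GetAssembler() = 0;
      virtual const Assembler& GetAssembler() const = 0;
      // Generic code only needs the base interface, e.g. to expose the code blob.
      std::size_t GetCodeSize() const { return GetAssembler().CodeSize(); }
    };

    class CodeGeneratorRISCV64Like : public CodeGenerator {   // per-ISA backend
     public:
      // Covariant overrides: callers that know the ISA get the concrete type.
      Riscv64LikeAssembler* GetAssembler() override { return &assembler_; }
      const Riscv64LikeAssembler& GetAssembler() const override { return assembler_; }
     private:
      Riscv64LikeAssembler assembler_;
    };

    int main() {
      CodeGeneratorRISCV64Like codegen;
      codegen.GetAssembler()->Emit(0x13);               // concrete access in backend code
      CodeGenerator* base = &codegen;
      std::cout << base->GetCodeSize() << " byte(s)\n"; // generic access via the base
    }
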
intrinsics_utils.h
    58  TAssembler* assembler = down_cast<TAssembler*>(codegen->GetAssembler());  in EmitNativeCode()

/art/compiler/utils/

jni_macro_assembler_test.h
    39  Ass* GetAssembler() {  in GetAssembler() function

/art/compiler/utils/riscv64/

assembler_riscv64_test.cc
    26   #define __ GetAssembler()->
    74   exclusion_(test->GetAssembler()) {}  in ScopedCSuppression()
    85   exclusion_(test->GetAssembler()) {}  in ScopedZbaAndCSuppression()
    98   : smo_(test, "-march=rv64imafdcv_zba"), exclusion_(test->GetAssembler()) {}  in ScopedZbbSuppression()
    111  : smo_(test, "-march=rv64imafdcv_zbb"), exclusion_(test->GetAssembler()) {}  in ScopedZbaSuppression()
    125  exclusion_(test->GetAssembler()) {}  in ScopedZbbAndCSuppression()
    138  : smo_(test, "-march=rv64imafdv"), exclusion_(test->GetAssembler()) {}  in ScopedZbaZbbAndCSuppression()
    340  const size_t nop_size = GetAssembler()->IsExtensionEnabled(Riscv64Extension::kZca) ?  in EmitNops()
    364  ScratchRegisterScope srs(GetAssembler());  in TestLoadConst64()
    628  (GetAssembler()->*f)(A0, reg, &label, is_bare);  in TestBcondA0RegForward()
    [all …]
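
The assembler tests follow the same conventions as the generators: the fixture exposes GetAssembler(), tests shorten it to a local "__" macro (hit 26), and helpers such as the TestBcond* routines drive many mnemonics through a pointer-to-member call (hit 628). A hedged, self-contained sketch of that test idiom, with stand-in types rather than ART's gtest harness:

    #include <iostream>
    #include <string>

    class Riscv64LikeAssembler {
     public:
      void Addi(int, int, int) { text_ += "addi\n"; }
      void Slli(int, int, int) { text_ += "slli\n"; }
      const std::string& Text() const { return text_; }
     private:
      std::string text_;
    };

    class AssemblerTest {                          // stand-in for the test fixture
     public:
      Riscv64LikeAssembler* GetAssembler() { return &assembler_; }

      #define __ GetAssembler()->                  // NOLINT
      void EmitNops() { __ Addi(0, 0, 0); }        // reads like an assembly listing

      // Drive an arbitrary instruction through a pointer-to-member, as the
      // TestBcond*-style helpers above do for branch variants.
      void TestImmOp(void (Riscv64LikeAssembler::*f)(int, int, int)) {
        (GetAssembler()->*f)(10, 10, 1);
      }
      #undef __

     private:
      Riscv64LikeAssembler assembler_;
    };

    int main() {
      AssemblerTest test;
      test.EmitNops();
      test.TestImmOp(&Riscv64LikeAssembler::Slli);
      std::cout << test.GetAssembler()->Text();
    }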