/art/compiler/utils/x86/

jni_macro_assembler_x86.cc
    169   X86ManagedRegister scratch = mscratch.AsX86();  in StoreStackOffsetToThread() local
    323   X86ManagedRegister scratch = mscratch.AsX86();  in CopyRef() local
    332   X86ManagedRegister scratch = mscratch.AsX86();  in CopyRawPtrFromThread() local
    341   X86ManagedRegister scratch = mscratch.AsX86();  in CopyRawPtrToThread() local
    350   X86ManagedRegister scratch = mscratch.AsX86();  in Copy() local
    373   ManagedRegister scratch,  in Copy()
    386   Register scratch = mscratch.AsX86().AsCpuRegister();  in Copy() local
    397   ManagedRegister scratch,  in Copy()
    411   Register scratch = mscratch.AsX86().AsCpuRegister();  in Copy() local
    450   X86ManagedRegister scratch = mscratch.AsX86();  in CreateHandleScopeEntry() local
    [all …]
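Every one of these sites follows the same JNI-assembler pattern: the caller hands in an opaque `ManagedRegister`, the backend narrows it to its architecture view (`mscratch.AsX86()`), pulls out the raw CPU register, and routes a memory-to-memory transfer through it, since x86 `mov` cannot take two memory operands. Below is a minimal, self-contained sketch of that data flow; the types are hypothetical stand-ins, not the real ART classes.

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical stand-ins for ManagedRegister / X86ManagedRegister / Register;
// the real classes live under art/compiler/utils/ and carry much more state.
struct CpuRegister { int code; };

struct X86View {
  CpuRegister cpu;
  CpuRegister AsCpuRegister() const { return cpu; }
};

struct ManagedReg {
  int code;
  X86View AsX86() const { return X86View{CpuRegister{code}}; }
};

// Stack-slot to stack-slot copy routed through a scratch register:
// load src into scratch, then store scratch to dest. The indirection exists
// because x86 mov cannot encode two memory operands in one instruction.
void CopyThroughScratch(uint32_t* frame, int dest_slot, int src_slot, ManagedReg mscratch) {
  CpuRegister scratch = mscratch.AsX86().AsCpuRegister();
  (void)scratch;                       // the real assembler would emit code using this register
  uint32_t tmp = frame[src_slot];      // movl src(%esp), %scratch
  frame[dest_slot] = tmp;              // movl %scratch, dest(%esp)
}

int main() {
  uint32_t frame[8] = {0, 42, 0, 0, 0, 0, 0, 0};
  CopyThroughScratch(frame, /*dest_slot=*/4, /*src_slot=*/1, ManagedReg{0});
  printf("%u\n", frame[4]);  // 42
}
```

The ARM, ARM64 and MIPS entries further down are the same idea with `AsVIXLRegister(mscratch.AsArm())`, `AsArm64()`, `AsMips64()` and `AsMips()` in place of the x86 accessors.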
|
/art/compiler/utils/arm64/

jni_macro_assembler_arm64.cc
    160   Arm64ManagedRegister scratch = m_scratch.AsArm64();  in StoreImmediateToFrame() local
    170   Arm64ManagedRegister scratch = m_scratch.AsArm64();  in StoreStackOffsetToThread() local
    188   Arm64ManagedRegister scratch = m_scratch.AsArm64();  in StoreSpanning() local
    363   Arm64ManagedRegister scratch = m_scratch.AsArm64();  in CopyRawPtrFromThread() local
    372   Arm64ManagedRegister scratch = m_scratch.AsArm64();  in CopyRawPtrToThread() local
    379   Arm64ManagedRegister scratch = m_scratch.AsArm64();  in CopyRef() local
    391   Arm64ManagedRegister scratch = m_scratch.AsArm64();  in Copy() local
    410   Arm64ManagedRegister scratch = m_scratch.AsArm64();  in Copy() local
    432   Arm64ManagedRegister scratch = m_scratch.AsArm64();  in Copy() local
    463   Arm64ManagedRegister scratch = m_scratch.AsArm64();  in Copy() local
    [all …]
|
jni_macro_assembler_arm64.h
    186   Arm64Exception(Arm64ManagedRegister scratch, size_t stack_adjust)  in Arm64Exception()
|
assembler_arm64.cc
    96    Arm64ManagedRegister scratch = m_scratch.AsArm64();  in JumpTo() local
|
/art/compiler/utils/arm/

jni_macro_assembler_arm_vixl.cc
    268   vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());  in StoreSpanning() local
    279   vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());  in CopyRef() local
    315   vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());  in StoreImmediateToFrame() local
    342   vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());  in CopyRawPtrFromThread() local
    358   vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());  in StoreStackOffsetToThread() local
    425   vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());  in Copy() local
    531   vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());  in CreateHandleScopeEntry() local
    577   vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());  in Call() local
    586   vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());  in Call() local
    614   vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm());  in ExceptionPoll() local
    [all …]
|
jni_macro_assembler_arm_vixl.h
    210   ArmException(ArmManagedRegister scratch, size_t stack_adjust)  in ArmException()
|
/art/compiler/utils/x86_64/

jni_macro_assembler_x86_64.cc
    208   X86_64ManagedRegister scratch = mscratch.AsX86_64();  in StoreStackOffsetToThread() local
    373   X86_64ManagedRegister scratch = mscratch.AsX86_64();  in CopyRef() local
    382   X86_64ManagedRegister scratch = mscratch.AsX86_64();  in CopyRawPtrFromThread() local
    391   X86_64ManagedRegister scratch = mscratch.AsX86_64();  in CopyRawPtrToThread() local
    401   X86_64ManagedRegister scratch = mscratch.AsX86_64();  in Copy() local
    424   ManagedRegister scratch,  in Copy()
    437   CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();  in Copy() local
    448   ManagedRegister scratch,  in Copy()
    462   CpuRegister scratch = mscratch.AsX86_64().AsCpuRegister();  in Copy() local
    507   X86_64ManagedRegister scratch = mscratch.AsX86_64();  in CreateHandleScopeEntry() local
    [all …]
|
/art/compiler/optimizing/

parallel_move_resolver.cc
    264   int scratch = -1;  in AllocateScratchRegister() local
    381   for (Location scratch : scratches_) {  in AddScratchLocation() local
    458   Location scratch = AllocateScratchLocationFor(kind);  in PerformMove() local
    495   Location scratch = scratches_[i];  in PerformMove() local
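`parallel_move_resolver.cc` is where the optimizing compiler materializes parallel moves; when the moves form a cycle (swap-like patterns), it allocates a scratch register or stack slot (`AllocateScratchRegister()` / `AllocateScratchLocationFor(kind)` above), routes one value through it, and releases it afterwards. Here is a compact sketch of the cycle-breaking idea only, using plain integer locations instead of ART's `Location` objects and assuming the moves form a permutation:

```cpp
#include <cstdio>
#include <vector>

// Plain-integer model of a parallel move: regs[i] is the value held by
// "location" i, and perm[d] = s means location d must end up with the value
// that location s held before any move ran. perm is assumed to be a
// permutation (each location read and written at most once), which is
// exactly the cyclic case that needs a scratch.
void ResolveWithScratch(std::vector<int>& regs, const std::vector<int>& perm) {
  const size_t n = perm.size();
  std::vector<bool> done(n, false);
  for (size_t start = 0; start < n; ++start) {
    if (done[start] || perm[start] == static_cast<int>(start)) {
      done[start] = true;
      continue;
    }
    int scratch = regs[start];  // save the value the first move will clobber
    size_t d = start;
    while (!done[d]) {
      size_t s = static_cast<size_t>(perm[d]);
      done[d] = true;
      if (s == start) {
        regs[d] = scratch;      // close the cycle from the scratch copy
      } else {
        regs[d] = regs[s];      // ordinary move: d <- s
        d = s;
      }
    }
  }
}

int main() {
  // Three-way rotation: loc0 <- loc1, loc1 <- loc2, loc2 <- loc0.
  std::vector<int> regs = {10, 20, 30};
  ResolveWithScratch(regs, {1, 2, 0});
  printf("%d %d %d\n", regs[0], regs[1], regs[2]);  // 20 30 10
}
```

The `parallel_move_test.cc` and `code_generator_arm64.cc` hits below exercise the `GetScratchLocation(kind)` side of the same mechanism.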
|
code_generator_vector_arm_vixl.cc
    903   /*out*/ vixl32::Register* scratch) {  in VecAddress()
    929   /*out*/ vixl32::Register* scratch) {  in VecAddressUnaligned()
    959   vixl32::Register scratch;  in VisitVecLoad() local
    1010  vixl32::Register scratch;  in VisitVecStore() local
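The `VecAddress()` / `VecAddressUnaligned()` helpers hand back a memory operand for a SIMD load or store and take an `/*out*/ ... Register* scratch` so they can materialize the element address in a spare core register when the addressing mode cannot encode base + data offset + scaled index directly. A rough illustration of that address computation follows; the layout constants and the encodable-offset limit are made up for the sketch, not taken from the ART codegen.

```cpp
#include <cstdint>
#include <cstdio>

// Made-up constants; ART's real values come from mirror::Array and the
// per-instruction immediate limits of the target ISA.
constexpr uint64_t kArrayDataOffset = 16;      // offset of element 0 inside the array object
constexpr uint64_t kMaxEncodableOffset = 255;  // pretend immediate-offset limit of the vector load/store

// Returns the address of element `index`. When the combined offset does not
// fit the instruction, the full address is first formed in `scratch`,
// mirroring the /*out*/ Register* scratch parameter above.
uint64_t VecAddressSketch(uint64_t array_base, uint64_t index, unsigned size_shift,
                          uint64_t* scratch) {
  uint64_t offset = kArrayDataOffset + (index << size_shift);
  if (offset <= kMaxEncodableOffset) {
    return array_base + offset;    // addressing mode encodes [base, #offset] directly
  }
  *scratch = array_base + offset;  // add scratch, base, index, lsl #shift; then add the data offset
  return *scratch;                 // the load/store then goes through [scratch]
}

int main() {
  uint64_t scratch = 0;
  printf("%llu\n", (unsigned long long) VecAddressSketch(0x1000, 100, 3, &scratch));
}
```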
|
parallel_move_test.cc
    116   Location scratch = GetScratchLocation(kind);  in AllocateScratchLocationFor() local
|
code_generator_vector_arm64.cc
    1403  /*out*/ Register* scratch) {  in VecAddress()
    1440  Register scratch;  in VisitVecLoad() local
    1498  Register scratch;  in VisitVecStore() local
|
code_generator_arm64.cc
    1030  Location scratch = GetScratchLocation(kind);  in AllocateScratchLocationFor() local
|
/art/compiler/utils/

swap_space_test.cc
    37    ScratchFile scratch;  in SwapTest() local
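`ScratchFile` here (and again in `dex2oat_image_test.cc` below) is ART's RAII test helper: it creates a temporary file when constructed and removes it when the test scope ends. A minimal sketch of the same idea, as a hypothetical class built on POSIX `mkstemp` rather than the real `common_runtime_test.h` implementation:

```cpp
#include <stdlib.h>
#include <unistd.h>
#include <string>

// RAII temp file in the spirit of ART's ScratchFile (hypothetical sketch):
// created on construction, closed and unlinked on destruction.
class ScratchFileSketch {
 public:
  ScratchFileSketch() : filename_("/tmp/scratch-XXXXXX") {
    fd_ = mkstemp(filename_.data());  // data() is writable in C++17; mkstemp fills in XXXXXX
  }
  ~ScratchFileSketch() {
    if (fd_ >= 0) {
      close(fd_);
      unlink(filename_.c_str());
    }
  }
  int GetFd() const { return fd_; }
  const std::string& GetFilename() const { return filename_; }

 private:
  std::string filename_;
  int fd_ = -1;
};

int main() {
  ScratchFileSketch scratch;               // e.g. the swap file or image output of a test
  (void)!write(scratch.GetFd(), "hi", 2);  // use the file while the test runs
}                                          // destructor removes it here
```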
|
/art/dex2oat/

dex2oat_image_test.cc
    119   ScratchFile scratch;  in CompileImageAndGetSizes() local
|
/art/compiler/utils/mips64/

assembler_mips64.cc
    3737  Mips64ManagedRegister scratch = mscratch.AsMips64();  in StoreImmediateToFrame() local
    3746  Mips64ManagedRegister scratch = mscratch.AsMips64();  in StoreStackOffsetToThread() local
    3759  Mips64ManagedRegister scratch = mscratch.AsMips64();  in StoreSpanning() local
    3836  Mips64ManagedRegister scratch = mscratch.AsMips64();  in CopyRef() local
    3845  Mips64ManagedRegister scratch = mscratch.AsMips64();  in CopyRawPtrFromThread() local
    3854  Mips64ManagedRegister scratch = mscratch.AsMips64();  in CopyRawPtrToThread() local
    3864  Mips64ManagedRegister scratch = mscratch.AsMips64();  in Copy() local
    3880  GpuRegister scratch = mscratch.AsMips64().AsGpuRegister();  in Copy() local
    3897  GpuRegister scratch = mscratch.AsMips64().AsGpuRegister();  in Copy() local
    3923  GpuRegister scratch = mscratch.AsMips64().AsGpuRegister();  in Copy() local
    [all …]
|
assembler_mips64.h
    399   explicit Mips64ExceptionSlowPath(Mips64ManagedRegister scratch, size_t stack_adjust)  in Mips64ExceptionSlowPath()
|
/art/compiler/utils/mips/

assembler_mips.cc
    4918  MipsManagedRegister scratch = mscratch.AsMips();  in StoreImmediateToFrame() local
    4927  MipsManagedRegister scratch = mscratch.AsMips();  in StoreStackOffsetToThread() local
    4941  MipsManagedRegister scratch = mscratch.AsMips();  in StoreSpanning() local
    5027  MipsManagedRegister scratch = mscratch.AsMips();  in CopyRef() local
    5036  MipsManagedRegister scratch = mscratch.AsMips();  in CopyRawPtrFromThread() local
    5047  MipsManagedRegister scratch = mscratch.AsMips();  in CopyRawPtrToThread() local
    5056  MipsManagedRegister scratch = mscratch.AsMips();  in Copy() local
    5072  Register scratch = mscratch.AsMips().AsCoreRegister();  in Copy() local
    5080  Register scratch = mscratch.AsMips().AsCoreRegister();  in Copy() local
    5098  Register scratch = mscratch.AsMips().AsCoreRegister();  in Copy() local
    [all …]
|
assembler_mips.h
    248   explicit MipsExceptionSlowPath(MipsManagedRegister scratch, size_t stack_adjust)  in MipsExceptionSlowPath()
|