Searched refs:kX86_64PointerSize (Results 1 – 8 of 8) sorted by relevance
/art/runtime/arch/x86_64/ |
D | callee_save_frame_x86_64.h |
    80   1 /* Method* */) * static_cast<size_t>(kX86_64PointerSize), kStackAlignment);   in GetFrameSize()
    92   POPCOUNT(GetFpSpills(type))) * static_cast<size_t>(kX86_64PointerSize);   in GetFpr1Offset()
    98   POPCOUNT(GetCoreSpills(type)) * static_cast<size_t>(kX86_64PointerSize);   in GetGpr1Offset()
    103  return GetFrameSize(type) - static_cast<size_t>(kX86_64PointerSize);   in GetReturnPcOffset()
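
  For context, a minimal self-contained sketch of how the GetFrameSize() and GetReturnPcOffset() hits above fit together; the spill masks, the RoundUp() helper and the 16-byte kStackAlignment value are illustrative stand-ins, not the actual ART declarations.

  #include <bit>
  #include <cstddef>
  #include <cstdint>

  constexpr std::size_t kPointerSizeBytes = 8;  // static_cast<size_t>(kX86_64PointerSize)
  constexpr std::size_t kStackAlignment = 16;   // assumed x86-64 stack alignment

  constexpr std::size_t RoundUp(std::size_t x, std::size_t n) {
    return ((x + n - 1) / n) * n;
  }

  // Frame = (core spills + FP spills + one ArtMethod* slot), each a pointer-sized
  // slot, rounded up to the stack alignment (cf. the line 80 hit).
  constexpr std::size_t GetFrameSize(uint32_t core_spill_mask, uint32_t fp_spill_mask) {
    return RoundUp((std::popcount(core_spill_mask) +
                    std::popcount(fp_spill_mask) +
                    1 /* Method* */) * kPointerSizeBytes,
                   kStackAlignment);
  }

  // The return PC occupies the last pointer-sized slot of the frame (cf. the line 103 hit).
  constexpr std::size_t GetReturnPcOffset(uint32_t core_spill_mask, uint32_t fp_spill_mask) {
    return GetFrameSize(core_spill_mask, fp_spill_mask) - kPointerSizeBytes;
  }

  static_assert(GetReturnPcOffset(/*core=*/0b111u, /*fp=*/0u) == 24u, "4 slots -> 32-byte frame");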
|
D | jni_frame_x86_64.h |
    32   static_assert(kX86_64PointerSize == PointerSize::k64, "Unexpected x86_64 pointer size");
|
/art/compiler/utils/x86_64/ |
D | jni_macro_assembler_x86_64.cc |
    88   static_assert(static_cast<size_t>(kX86_64PointerSize) == kFramePointerSize,   in BuildFrame()
    310  DCHECK_EQ(dest.GetSize(), static_cast<size_t>(kX86_64PointerSize));   in MoveArguments()
    508  Address::Absolute(Thread::SelfOffset<kX86_64PointerSize>(), true));   in GetCurrentThread()
    513  __ gs()->movq(scratch, Address::Absolute(Thread::SelfOffset<kX86_64PointerSize>(), true));   in GetCurrentThread()
    521  constexpr ThreadOffset64 thread_flags_offset = Thread::ThreadFlagsOffset<kX86_64PointerSize>();   in TryToTransitionFromRunnableToNative()
    523  Thread::HeldMutexOffset<kX86_64PointerSize>(kMutatorLock);   in TryToTransitionFromRunnableToNative()
    550  constexpr ThreadOffset64 thread_flags_offset = Thread::ThreadFlagsOffset<kX86_64PointerSize>();   in TryToTransitionFromNativeToRunnable()
    552  Thread::HeldMutexOffset<kX86_64PointerSize>(kMutatorLock);   in TryToTransitionFromNativeToRunnable()
    554  Thread::MutatorLockOffset<kX86_64PointerSize>();   in TryToTransitionFromNativeToRunnable()
    591  __ gs()->testl(Address::Absolute(Thread::ThreadFlagsOffset<kX86_64PointerSize>(), true),   in SuspendCheck()
    [all …]
|
/art/compiler/jni/quick/x86_64/ |
D | calling_convention_x86_64.cc |
    179  static_cast<size_t>(kX86_64PointerSize) +  // Method ref   in CurrentParamStackOffset()
    195  kX86_64PointerSize) {   in X86_64JniCallingConvention()
    215  const size_t method_ptr_size = static_cast<size_t>(kX86_64PointerSize);   in FrameSize()
|
/art/libartbase/arch/ |
D | instruction_set.h |
    60   static constexpr PointerSize kX86_64PointerSize = PointerSize::k64;   variable
    105  return kX86_64PointerSize;   in GetInstructionSetPointerSize()
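
  A minimal sketch of how the definition at line 60 is typically consumed; the PointerSize and InstructionSet enums below are trimmed stand-ins (x86 cases only), not the full ART declarations.

  #include <cstddef>

  enum class PointerSize : std::size_t { k32 = 4, k64 = 8 };
  enum class InstructionSet { kX86, kX86_64 /* other ISAs elided */ };

  static constexpr PointerSize kX86_64PointerSize = PointerSize::k64;

  // Mirrors the line 105 hit: the per-ISA lookup just returns the per-arch constant.
  constexpr PointerSize GetInstructionSetPointerSize(InstructionSet isa) {
    return isa == InstructionSet::kX86_64 ? kX86_64PointerSize : PointerSize::k32;
  }

  static_assert(GetInstructionSetPointerSize(InstructionSet::kX86_64) == PointerSize::k64,
                "x86-64 pointers are 8 bytes");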
|
/art/compiler/optimizing/ |
D | code_generator_x86_64.h |
    32   static constexpr size_t kX86_64WordSize = static_cast<size_t>(kX86_64PointerSize);
    102  kX86_64PointerSize) {}   in InvokeRuntimeCallingConvention()
    115  kX86_64PointerSize) {}   in InvokeDexCallingConvention()
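
  The line 32 hit is where the Optimizing code generator derives its word size from the pointer-size constant; a one-line sanity check of that relationship, reusing the stand-in definitions from the instruction_set.h sketch above (assuming PointerSize::k64 converts to 8).

  static constexpr std::size_t kX86_64WordSize = static_cast<std::size_t>(kX86_64PointerSize);
  static_assert(kX86_64WordSize == 8u, "stack words and pointers are both 8 bytes on x86-64");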
|
D | code_generator_x86_64.cc |
    79    #define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kX86_64PointerSize, x).Int32Value()
    598   Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);   in EmitNativeCode()
    690   Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);   in EmitNativeCode()
    1060  GetThreadOffset<kX86_64PointerSize>(kQuickCompileOptimized).Int32Value());   in EmitNativeCode()
    1177  GetThreadOffset<kX86_64PointerSize>(invoke->GetStringInitEntryPoint()).Int32Value();   in GenerateStaticOrDirectCall()
    1219  ArtMethod::EntryPointFromJniOffset(kX86_64PointerSize).SizeValue()));   in GenerateStaticOrDirectCall()
    1255  kX86_64PointerSize).SizeValue()));   in GenerateStaticOrDirectCall()
    1267  invoke->GetVTableIndex(), kX86_64PointerSize).SizeValue();   in GenerateVirtualCall()
    1295  kX86_64PointerSize).SizeValue()));   in GenerateVirtualCall()
    1551  GenerateInvokeRuntime(GetThreadOffset<kX86_64PointerSize>(entrypoint).Int32Value());   in InvokeRuntime()
    [all …]
|
D | intrinsics_x86_64.cc |
    126   int32_t entry_point_offset = Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(TMP);   in EmitNativeCode()
    1800  GetAssembler()->gs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86_64PointerSize>(),   in VisitThreadCurrentThread()
    3301  ThreadOffset64 offset = Thread::WeakRefAccessEnabledOffset<kX86_64PointerSize>();   in VisitReferenceGetReferent()
    3404  (Thread::InterruptedOffset<kX86_64PointerSize>().Int32Value(), /* no_rip= */ true);   in VisitThreadInterrupted()
|