
Searched refs:vcpu_is_preempted (Results 1 – 12 of 12) sorted by relevance

/arch/arm64/include/asm/
  spinlock.h:21   #define vcpu_is_preempted vcpu_is_preempted   (macro)
  spinlock.h:22   static inline bool vcpu_is_preempted(int cpu)   (function)
/arch/powerpc/include/asm/
  paravirt.h:93   #define vcpu_is_preempted vcpu_is_preempted   (macro)
  paravirt.h:94   static inline bool vcpu_is_preempted(int cpu)   (function)
/arch/x86/include/asm/
  qspinlock.h:59   #define vcpu_is_preempted vcpu_is_preempted   (macro)
  qspinlock.h:60   static inline bool vcpu_is_preempted(long cpu)   (function)
  paravirt_types.h:259   struct paravirt_callee_save vcpu_is_preempted;   (member)
  paravirt.h:607   return PVOP_ALT_CALLEE1(bool, lock.vcpu_is_preempted, cpu,   (in pv_vcpu_is_preempted())
/arch/x86/kernel/
  paravirt-spinlocks.c:32   return pv_ops.lock.vcpu_is_preempted.func ==   (in pv_is_native_vcpu_is_preempted())
  kvm.c:585   if (vcpu_is_preempted(cpu)) {   (in kvm_smp_send_call_func_ipi())
  kvm.c:1001   pv_ops.lock.vcpu_is_preempted =   (in kvm_spinlock_init())
  paravirt.c:367   .lock.vcpu_is_preempted =
/arch/x86/hyperv/
  hv_spinlock.c:84   pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(hv_vcpu_is_preempted);   (in hv_init_spinlocks())
/arch/s390/include/asm/
  spinlock.h:25   #define vcpu_is_preempted arch_vcpu_is_preempted   (macro)
/arch/x86/xen/
  spinlock.c:144   pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(xen_vcpu_stolen);   (in xen_init_spinlocks())
/arch/s390/kvm/
  diag.c:191   if (!vcpu_is_preempted(tcpu_cpu))   (in __diag_time_slice_end_directed())
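
Read together, these hits trace one pattern: each architecture that can detect preemption (arm64, powerpc, s390, x86) defines vcpu_is_preempted() together with a same-named macro so generic code knows an override exists; on x86 the hook is additionally a paravirt callee-save callback that KVM (kvm.c:1001), Hyper-V (hv_spinlock.c:84) and Xen (spinlock.c:144) install at init time; and callers such as kvm_smp_send_call_func_ipi() and __diag_time_slice_end_directed() simply consult it before spinning on, signalling, or yielding to another CPU. A minimal standalone sketch of that shape (hypothetical names; not the kernel's actual fallback or pv_ops plumbing):

    #include <stdbool.h>

    /*
     * Fallback of the kind generic code relies on: if no architecture
     * override exists, assume the vCPU is never preempted.  The arch
     * headers listed above define both the function and a same-named
     * macro, so a default like this is compiled out wherever a real
     * probe exists.
     */
    #ifndef vcpu_is_preempted
    static inline bool vcpu_is_preempted(int cpu)
    {
            (void)cpu;
            return false;
    }
    #endif

    /*
     * Hypothetical caller, modelled on the kvm.c:585 and diag.c:191 hits:
     * before busy-waiting on (or directing a yield to) another CPU, bail
     * out early if that CPU's vCPU has currently lost its physical CPU.
     */
    static bool worth_spinning_on(int holder_cpu)
    {
            return !vcpu_is_preempted(holder_cpu);
    }

The x86 entries also show why the hook goes through pv_ops there: which hypervisor the guest runs under is only known at boot, so the probe is installed as a function pointer during init (kvm_spinlock_init(), hv_init_spinlocks(), xen_init_spinlocks()) rather than selected at compile time.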