/arch/x86/kernel/cpu/
perfctr-watchdog.c:
    101  int avail_to_resrv_perfctr_nmi_bit(unsigned int counter)  in avail_to_resrv_perfctr_nmi_bit() argument
    103  BUG_ON(counter > NMI_MAX_COUNTER_BITS);  in avail_to_resrv_perfctr_nmi_bit()
    105  return !test_bit(counter, perfctr_nmi_owner);  in avail_to_resrv_perfctr_nmi_bit()
    111  unsigned int counter;  in reserve_perfctr_nmi() local
    113  counter = nmi_perfctr_msr_to_bit(msr);  in reserve_perfctr_nmi()
    115  if (counter > NMI_MAX_COUNTER_BITS)  in reserve_perfctr_nmi()
    118  if (!test_and_set_bit(counter, perfctr_nmi_owner))  in reserve_perfctr_nmi()
    126  unsigned int counter;  in release_perfctr_nmi() local
    128  counter = nmi_perfctr_msr_to_bit(msr);  in release_perfctr_nmi()
    130  if (counter > NMI_MAX_COUNTER_BITS)  in release_perfctr_nmi()
    [all …]
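
The reservation scheme above hands out perf counters by atomically setting a bit in a shared
owner bitmap: test_and_set_bit() either claims the bit or reports it already taken. A minimal
userspace sketch of the same claim/release pattern, rebuilt on C11 atomics; the names and the
32-counter limit here are illustrative, not the kernel's:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define MAX_COUNTER_BITS 32

    static atomic_ulong owner_bitmap;   /* stands in for perfctr_nmi_owner */

    /* Atomically set bit 'counter'; true means we claimed it first. */
    static bool reserve_counter(unsigned int counter)
    {
        unsigned long mask, old;

        if (counter >= MAX_COUNTER_BITS)
            return false;
        mask = 1UL << counter;
        old = atomic_fetch_or(&owner_bitmap, mask);   /* ~ test_and_set_bit() */
        return !(old & mask);
    }

    static void release_counter(unsigned int counter)
    {
        atomic_fetch_and(&owner_bitmap, ~(1UL << counter));  /* ~ clear_bit() */
    }

    int main(void)
    {
        printf("first claim:  %d\n", reserve_counter(3));   /* 1 */
        printf("second claim: %d\n", reserve_counter(3));   /* 0: already owned */
        release_counter(3);
        return 0;
    }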

/arch/arm/include/asm/
atomic.h:
    27  #define atomic_read(v)	READ_ONCE((v)->counter)
    28  #define atomic_set(v,i)	WRITE_ONCE(((v)->counter), (i))
    44  prefetchw(&v->counter); \
    51  : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
    52  : "r" (&v->counter), "Ir" (i) \
    62  prefetchw(&v->counter); \
    70  : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
    71  : "r" (&v->counter), "Ir" (i) \
    83  prefetchw(&v->counter); \
    91  : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
    [all …]
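
The ARM lines above come from ldrex/strex (load-exclusive/store-exclusive) loops: load the
counter, compute, attempt the store, and retry if another CPU intervened. A sketch of that
retry shape in portable C11, using a weak compare-exchange in place of the exclusive pair;
this illustrates the pattern, it is not the kernel's implementation:

    #include <stdatomic.h>
    #include <stdio.h>

    static int atomic_add_return_sketch(atomic_int *v, int i)
    {
        int old = atomic_load_explicit(v, memory_order_relaxed);
        int new_val;

        do {
            new_val = old + i;  /* compute, like the add between ldrex and strex */
            /* On failure 'old' is reloaded, mirroring a failed strex retry. */
        } while (!atomic_compare_exchange_weak(v, &old, new_val));

        return new_val;
    }

    int main(void)
    {
        atomic_int v = 40;
        printf("%d\n", atomic_add_return_sketch(&v, 2));  /* 42 */
        return 0;
    }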

/arch/powerpc/include/asm/
atomic.h:
    32  __asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));  in atomic_read()
    39  __asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));  in atomic_set()
    53  : "=&r" (t), "+m" (v->counter) \
    54  : "r" (a), "r" (&v->counter) \
    69  : "=&r" (t), "+m" (v->counter) \
    70  : "r" (a), "r" (&v->counter) \
    87  : "=&r" (res), "=&r" (t), "+m" (v->counter) \
    88  : "r" (a), "r" (&v->counter) \
    136 : "=&r" (t), "+m" (v->counter)  in ATOMIC_OPS()
    137 : "r" (&v->counter)  in ATOMIC_OPS()
    [all …]

/arch/arm64/include/asm/
arm_dsu_pmu.h:
    53  static inline void __dsu_pmu_select_counter(int counter)  in __dsu_pmu_select_counter() argument
    55  write_sysreg_s(counter, CLUSTERPMSELR_EL1);  in __dsu_pmu_select_counter()
    59  static inline u64 __dsu_pmu_read_counter(int counter)  in __dsu_pmu_read_counter() argument
    61  __dsu_pmu_select_counter(counter);  in __dsu_pmu_read_counter()
    65  static inline void __dsu_pmu_write_counter(int counter, u64 val)  in __dsu_pmu_write_counter() argument
    67  __dsu_pmu_select_counter(counter);  in __dsu_pmu_write_counter()
    72  static inline void __dsu_pmu_set_event(int counter, u32 event)  in __dsu_pmu_set_event() argument
    74  __dsu_pmu_select_counter(counter);  in __dsu_pmu_set_event()
    90  static inline void __dsu_pmu_disable_counter(int counter)  in __dsu_pmu_disable_counter() argument
    92  write_sysreg_s(BIT(counter), CLUSTERPMCNTENCLR_EL1);  in __dsu_pmu_disable_counter()
    [all …]
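
The DSU PMU uses a select-then-access scheme: writing CLUSTERPMSELR_EL1 picks a counter, and
subsequent counter reads and writes are routed to the selected one. A sketch of that
indirection with hypothetical stand-in "registers" (plain variables); real callers must also
serialize the select/access pair so nothing reselects in between:

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_COUNTERS 8

    static unsigned int pmselr;               /* ~ CLUSTERPMSELR_EL1 */
    static uint64_t pmxevcntr[NUM_COUNTERS];  /* banked counter values */

    static void pmu_select_counter(int counter)
    {
        pmselr = counter;                     /* route later accesses here */
    }

    static uint64_t pmu_read_counter(int counter)
    {
        pmu_select_counter(counter);
        return pmxevcntr[pmselr];
    }

    static void pmu_write_counter(int counter, uint64_t val)
    {
        pmu_select_counter(counter);
        pmxevcntr[pmselr] = val;
    }

    int main(void)
    {
        pmu_write_counter(2, 1234);
        printf("%llu\n", (unsigned long long)pmu_read_counter(2));  /* 1234 */
        return 0;
    }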

/arch/x86/include/asm/
atomic64_64.h:
    22  return READ_ONCE((v)->counter);  in arch_atomic64_read()
    34  WRITE_ONCE(v->counter, i);  in arch_atomic64_set()
    47  : "=m" (v->counter)  in arch_atomic64_add()
    48  : "er" (i), "m" (v->counter) : "memory");  in arch_atomic64_add()
    61  : "=m" (v->counter)  in arch_atomic64_sub()
    62  : "er" (i), "m" (v->counter) : "memory");  in arch_atomic64_sub()
    76  return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);  in arch_atomic64_sub_and_test()
    89  : "=m" (v->counter)  in arch_atomic64_inc()
    90  : "m" (v->counter) : "memory");  in arch_atomic64_inc()
    103 : "=m" (v->counter)  in arch_atomic64_dec()
    [all …]
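
These x86 helpers wrap single LOCK-prefixed instructions. The same effects can be written
with compiler builtins, which on x86-64 GCC/Clang lower to the corresponding lock-prefixed
instructions; a small sketch (not the kernel's code, which keeps explicit asm for exact
instruction selection):

    #include <stdio.h>

    int main(void)
    {
        long counter = 0;

        __atomic_fetch_add(&counter, 5, __ATOMIC_SEQ_CST);  /* ~ lock addq */
        __atomic_fetch_sub(&counter, 2, __ATOMIC_SEQ_CST);  /* ~ lock subq */

        /* ~ arch_atomic64_sub_and_test(): an RMW plus a flags test */
        if (__atomic_sub_fetch(&counter, 3, __ATOMIC_SEQ_CST) == 0)
            printf("dropped to zero\n");
        return 0;
    }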

atomic.h:
    31  return READ_ONCE((v)->counter);  in arch_atomic_read()
    43  WRITE_ONCE(v->counter, i);  in arch_atomic_set()
    56  : "+m" (v->counter)  in arch_atomic_add()
    70  : "+m" (v->counter)  in arch_atomic_sub()
    85  return GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, e, "er", i);  in arch_atomic_sub_and_test()
    98  : "+m" (v->counter) :: "memory");  in arch_atomic_inc()
    111 : "+m" (v->counter) :: "memory");  in arch_atomic_dec()
    125 return GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, e);  in arch_atomic_dec_and_test()
    139 return GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, e);  in arch_atomic_inc_and_test()
    154 return GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, s, "er", i);  in arch_atomic_add_negative()
    [all …]

local.h:
    22  : "+m" (l->a.counter));  in local_inc()
    28  : "+m" (l->a.counter));  in local_dec()
    34  : "+m" (l->a.counter)  in local_add()
    41  : "+m" (l->a.counter)  in local_sub()
    56  return GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, e, "er", i);  in local_sub_and_test()
    69  return GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, e);  in local_dec_and_test()
    82  return GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, e);  in local_inc_and_test()
    96  return GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, s, "er", i);  in local_add_negative()
    110 : "+r" (i), "+m" (l->a.counter)  in local_add_return()
    124 (cmpxchg_local(&((l)->a.counter), (o), (n)))
    [all …]
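
local_t ops update l->a.counter without the LOCK prefix: each local_t is owned by one CPU,
so a single-instruction read-modify-write is already atomic with respect to that CPU's own
interrupts, which is all the cheaper counters need. A sketch of the ownership idea with a
hypothetical per-CPU array (the kernel instead uses percpu data and asm to guarantee the
single instruction):

    #include <stdio.h>

    #define NR_CPUS 4

    struct local_counter { long counter; };

    static struct local_counter percpu_events[NR_CPUS];

    /* Called only on 'cpu' with migration disabled: no cross-CPU writers. */
    static void local_inc_sketch(int cpu)
    {
        percpu_events[cpu].counter++;   /* one RMW, no lock prefix needed */
    }

    static long total_events(void)
    {
        long sum = 0;

        for (int cpu = 0; cpu < NR_CPUS; cpu++)
            sum += percpu_events[cpu].counter;  /* readers tolerate slight skew */
        return sum;
    }

    int main(void)
    {
        local_inc_sketch(0);
        local_inc_sketch(0);
        local_inc_sketch(3);
        printf("%ld\n", total_events());  /* 3 */
        return 0;
    }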

refcount.h:
    46  : [var] "+m" (r->refs.counter)  in refcount_add()
    55  : [var] "+m" (r->refs.counter)  in refcount_inc()
    63  : [var] "+m" (r->refs.counter)  in refcount_dec()
    72  r->refs.counter, e, "er", i, "cx");  in refcount_sub_and_test()
    86  r->refs.counter, e, "cx");  in refcount_dec_and_test()
    111 : : [var] "m" (r->refs.counter)  in refcount_add_not_zero()
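
x86 refcount_t guards refs.counter against overflow and underflow: instead of wrapping, the
asm traps to a handler that pins the counter at a saturation value, turning a refcount bug
into a leak rather than a use-after-free. A portable CAS-loop sketch of the saturation idea;
the pinned value and return conventions here are illustrative, and the saturating store is
simplified (the kernel handles that race in its exception path):

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <limits.h>
    #include <stdio.h>

    #define SATURATED INT_MIN   /* hypothetical pinned "poison" value */

    /* True if the count was really incremented. */
    static bool refcount_inc_sketch(atomic_int *refs)
    {
        int old = atomic_load(refs);

        do {
            if (old == 0 || old == SATURATED)
                return false;   /* don't resurrect; stay pinned */
            if (old == INT_MAX) {
                atomic_store(refs, SATURATED);  /* saturate instead of wrapping */
                return false;
            }
        } while (!atomic_compare_exchange_weak(refs, &old, old + 1));
        return true;
    }

    /* True when the caller dropped the last reference. */
    static bool refcount_dec_and_test_sketch(atomic_int *refs)
    {
        int old = atomic_load(refs);

        do {
            if (old == SATURATED)
                return false;   /* a leaked object is never freed */
        } while (!atomic_compare_exchange_weak(refs, &old, old - 1));
        return old == 1;
    }

    int main(void)
    {
        atomic_int refs = 1;

        refcount_inc_sketch(&refs);
        refcount_dec_and_test_sketch(&refs);
        printf("last ref: %d\n", refcount_dec_and_test_sketch(&refs));  /* 1 */
        return 0;
    }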

/arch/s390/include/asm/
atomic.h:
    26  : "=d" (c) : "Q" (v->counter));  in atomic_read()
    34  : "=Q" (v->counter) : "d" (i));  in atomic_set()
    39  return __atomic_add_barrier(i, &v->counter) + i;  in atomic_add_return()
    44  return __atomic_add_barrier(i, &v->counter);  in atomic_fetch_add()
    51  __atomic_add_const(i, &v->counter);  in atomic_add()
    55  __atomic_add(i, &v->counter);  in atomic_add()
    65  __atomic_##op(i, &v->counter); \
    69  return __atomic_##op##_barrier(i, &v->counter); \
    78  #define atomic_xchg(v, new) (xchg(&((v)->counter), new))  in ATOMIC_OPS()
    82  return __atomic_cmpxchg(&v->counter, old, new);  in ATOMIC_OPS()
    [all …]

/arch/mips/include/asm/
atomic.h:
    44  #define atomic_read(v)	READ_ONCE((v)->counter)
    53  #define atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))
    70  : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
    76  v->counter c_op i; \
    100 "+" GCC_OFF_SMALL_ASM() (v->counter) \
    106 result = v->counter; \
    108 v->counter = result; \
    134 "+" GCC_OFF_SMALL_ASM() (v->counter) \
    140 result = v->counter; \
    141 v->counter c_op i; \
    [all …]

local.h:
    46  : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)  in local_add_return()
    47  : "Ir" (i), "m" (l->a.counter)  in local_add_return()
    61  : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)  in local_add_return()
    62  : "Ir" (i), "m" (l->a.counter)  in local_add_return()
    68  result = l->a.counter;  in local_add_return()
    70  l->a.counter = result;  in local_add_return()
    93  : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)  in local_sub_return()
    94  : "Ir" (i), "m" (l->a.counter)  in local_sub_return()
    108 : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)  in local_sub_return()
    109 : "Ir" (i), "m" (l->a.counter)  in local_sub_return()
    [all …]

/arch/alpha/include/asm/
atomic.h:
    30  #define atomic_read(v)	READ_ONCE((v)->counter)
    31  #define atomic64_read(v)	READ_ONCE((v)->counter)
    33  #define atomic_set(v,i)	WRITE_ONCE((v)->counter, (i))
    34  #define atomic64_set(v,i)	WRITE_ONCE((v)->counter, (i))
    54  :"=&r" (temp), "=m" (v->counter) \
    55  :"Ir" (i), "m" (v->counter)); \
    71  :"=&r" (temp), "=m" (v->counter), "=&r" (result) \
    72  :"Ir" (i), "m" (v->counter) : "memory"); \
    89  :"=&r" (temp), "=m" (v->counter), "=&r" (result) \
    90  :"Ir" (i), "m" (v->counter) : "memory"); \
    [all …]

local.h:
    33  :"=&r" (temp), "=m" (l->a.counter), "=&r" (result)  in local_add_return()
    34  :"Ir" (i), "m" (l->a.counter) : "memory");  in local_add_return()
    50  :"=&r" (temp), "=m" (l->a.counter), "=&r" (result)  in local_sub_return()
    51  :"Ir" (i), "m" (l->a.counter) : "memory");  in local_sub_return()
    56  (cmpxchg_local(&((l)->a.counter), (o), (n)))
    57  #define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))
    97  #define __local_inc(l)	((l)->a.counter++)
    98  #define __local_dec(l)	((l)->a.counter--)
    99  #define __local_add(i,l)	((l)->a.counter+=(i))
    100 #define __local_sub(i,l)	((l)->a.counter-=(i))

/arch/parisc/include/asm/
spinlock.h:
    78  rw->counter++;  in arch_read_lock()
    90  rw->counter--;  in arch_read_unlock()
    103 rw->counter++;  in arch_read_trylock()
    111 if (rw->counter < 0)  in arch_read_trylock()
    115 while (arch_spin_is_locked(&rw->lock) && rw->counter >= 0)  in arch_read_trylock()
    130 if (rw->counter != 0) {  in arch_write_lock()
    134 while (rw->counter != 0)  in arch_write_lock()
    140 rw->counter = -1; /* mark as write-locked */  in arch_write_lock()
    147 rw->counter = 0;  in arch_write_unlock()
    160 if (rw->counter == 0) {  in arch_write_trylock()
    [all …]
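
The parisc rwlock encodes everything in one counter: positive values count readers, 0 is
free, and -1 marks a writer. The real code additionally takes rw->lock and disables
interrupts around each update; a single-threaded sketch of just the state transitions:

    #include <stdbool.h>
    #include <stdio.h>

    struct rwlock_sketch {
        int counter;   /* >0: that many readers, 0: free, -1: write-locked */
    };

    static bool read_trylock(struct rwlock_sketch *rw)
    {
        if (rw->counter >= 0) {   /* no writer present */
            rw->counter++;
            return true;
        }
        return false;
    }

    static void read_unlock(struct rwlock_sketch *rw)
    {
        rw->counter--;
    }

    static bool write_trylock(struct rwlock_sketch *rw)
    {
        if (rw->counter == 0) {   /* no readers, no writer */
            rw->counter = -1;     /* mark as write-locked */
            return true;
        }
        return false;
    }

    static void write_unlock(struct rwlock_sketch *rw)
    {
        rw->counter = 0;
    }

    int main(void)
    {
        struct rwlock_sketch rw = { 0 };

        printf("read:  %d\n", read_trylock(&rw));   /* 1 */
        printf("write: %d\n", write_trylock(&rw));  /* 0: a reader holds it */
        read_unlock(&rw);
        printf("write: %d\n", write_trylock(&rw));  /* 1 */
        write_unlock(&rw);
        return 0;
    }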

atomic.h:
    64  v->counter = i;  in atomic_set()
    73  return READ_ONCE((v)->counter);  in atomic_read()
    77  #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
    78  #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
    86  v->counter c_op i; \
    97  ret = (v->counter c_op i); \
    110 ret = v->counter; \
    111 v->counter c_op i; \
    151 v->counter c_op i; \
    162 ret = (v->counter c_op i); \
    [all …]

/arch/h8300/include/asm/
atomic.h:
    17  #define atomic_read(v)	READ_ONCE((v)->counter)
    18  #define atomic_set(v, i)	WRITE_ONCE(((v)->counter), (i))
    27  ret = v->counter c_op i; \
    39  ret = v->counter; \
    40  v->counter c_op i; \
    51  v->counter c_op i; \
    78  ret = v->counter;  in atomic_cmpxchg()
    80  v->counter = new;  in atomic_cmpxchg()
    91  ret = v->counter;  in atomic_fetch_add_unless()
    93  v->counter += a;  in atomic_fetch_add_unless()
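
atomic_fetch_add_unless(v, a, u) adds 'a' only while the counter is not 'u' and returns the
old value; h8300, a uniprocessor port, can do the check-and-add under a brief interrupt-off
window, which is why the lines above are plain C. On SMP the usual shape is a
compare-exchange loop, sketched here in C11:

    #include <stdatomic.h>
    #include <stdio.h>

    static int fetch_add_unless_sketch(atomic_int *v, int a, int u)
    {
        int old = atomic_load(v);

        do {
            if (old == u)
                break;   /* forbidden value: leave the counter untouched */
        } while (!atomic_compare_exchange_weak(v, &old, old + a));

        return old;
    }

    int main(void)
    {
        atomic_int v = 1;

        fetch_add_unless_sketch(&v, 1, 0);   /* 1 != 0: counter becomes 2 */
        atomic_store(&v, 0);
        fetch_add_unless_sketch(&v, 1, 0);   /* 0 == 0: unchanged */
        printf("%d\n", atomic_load(&v));     /* 0 */
        return 0;
    }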

/arch/sparc/include/asm/
atomic_64.h:
    18  #define atomic_read(v)	READ_ONCE((v)->counter)
    19  #define atomic64_read(v)	READ_ONCE((v)->counter)
    21  #define atomic_set(v, i)	WRITE_ONCE(((v)->counter), (i))
    22  #define atomic64_set(v, i)	WRITE_ONCE(((v)->counter), (i))
    53  #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))  in ATOMIC_OPS()
    57  return xchg(&v->counter, new);  in ATOMIC_OPS()
    61  ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
    62  #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/arch/arc/include/asm/
atomic.h:
    21  #define atomic_read(v)	READ_ONCE((v)->counter)
    25  #define atomic_set(v, i)	WRITE_ONCE(((v)->counter), (i))
    38  : [ctr] "r" (&v->counter),	/* Not "m": llock only supports reg direct addr mode */ \
    60  : [ctr] "r" (&v->counter), \
    87  : [ctr] "r" (&v->counter), \
    101 #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
    119 WRITE_ONCE(v->counter, i);  in atomic_set()
    138 v->counter c_op i; \
    152 temp = v->counter; \
    154 v->counter = temp; \
    [all …]

spinlock.h:
    101 : [rwlock] "r" (&(rw->counter)),  in arch_read_lock()
    125 : [rwlock] "r" (&(rw->counter)),  in arch_read_trylock()
    158 : [rwlock] "r" (&(rw->counter)),  in arch_write_lock()
    183 : [rwlock] "r" (&(rw->counter)),  in arch_write_trylock()
    209 : [rwlock] "r" (&(rw->counter))  in arch_read_unlock()
    217 WRITE_ONCE(rw->counter, __ARCH_RW_LOCK_UNLOCKED__);  in arch_write_unlock()
    316 if (rw->counter > 0) {  in arch_read_trylock()
    317 rw->counter--;  in arch_read_trylock()
    342 if (rw->counter == __ARCH_RW_LOCK_UNLOCKED__) {  in arch_write_trylock()
    343 rw->counter = 0;  in arch_write_trylock()
    [all …]

/arch/ia64/include/asm/
atomic.h:
    25  #define atomic_read(v)	READ_ONCE((v)->counter)
    26  #define atomic64_read(v)	READ_ONCE((v)->counter)
    28  #define atomic_set(v,i)	WRITE_ONCE(((v)->counter), (i))
    29  #define atomic64_set(v,i)	WRITE_ONCE(((v)->counter), (i))
    82  ? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter) \
    90  ? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter) \
    98  ? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq) \
    106 ? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq) \
    167 ? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter) \
    175 ? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter) \
    [all …]
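
The ia64 macros only use the hardware fetchadd when the addend is a compile-time constant
the instruction can encode (+/-1, 4, 8, 16); anything else falls back to a slower generic
path. A sketch of that compile-time dispatch with hypothetical stand-in functions (both just
forward to a C11 fetch-add here, so only the selection logic is being shown):

    #include <stdatomic.h>
    #include <stdio.h>

    static int fast_fetchadd(atomic_int *v, int i)         /* pretend: HW fetchadd */
    {
        return atomic_fetch_add(v, i);
    }

    static int slow_add_return_base(atomic_int *v, int i)  /* pretend: CAS loop */
    {
        return atomic_fetch_add(v, i);
    }

    /* Fast path only for constants the instruction encodes. */
    #define add_return_sketch(v, i)                                  \
        ((__builtin_constant_p(i) &&                                 \
          ((i) == 1 || (i) == 4 || (i) == 8 || (i) == 16 ||          \
           (i) == -1 || (i) == -4 || (i) == -8 || (i) == -16))       \
             ? fast_fetchadd((v), (i)) + (i)                         \
             : slow_add_return_base((v), (i)) + (i))

    int main(void)
    {
        atomic_int v = 0;

        printf("%d\n", add_return_sketch(&v, 4));  /* fast path: 4 */
        printf("%d\n", add_return_sketch(&v, 5));  /* fallback:  9 */
        return 0;
    }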

/arch/hexagon/include/asm/
atomic.h:
    26  : "r" (&v->counter), "r" (new)  in atomic_set()
    39  #define atomic_read(v)	READ_ONCE((v)->counter)
    46  #define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
    78  : "r" (&v->counter), "r" (old), "r" (new)  in atomic_cmpxchg()
    96  : "r" (&v->counter), "r" (i) \
    112 : "r" (&v->counter), "r" (i) \
    129 : "r" (&v->counter), "r" (i) \

/arch/sh/include/asm/
atomic.h:
    24  #define atomic_read(v)	READ_ONCE((v)->counter)
    25  #define atomic_set(v,i)	WRITE_ONCE((v)->counter, (i))
    35  #define atomic_xchg(v, new)	(xchg(&((v)->counter), new))
    36  #define atomic_cmpxchg(v, o, n)	(cmpxchg(&((v)->counter), (o), (n)))

/arch/csky/include/asm/
atomic.h:
    29  : "r" (a), "r"(&v->counter), "r"(u)  in __atomic_add_unless()
    49  : "r" (i), "r"(&v->counter) \
    66  : "r" (i), "r"(&v->counter) \
    86  : "r" (i), "r"(&v->counter) \
    113 : "r" (a), "r"(&v->counter), "r"(u)  in __atomic_add_unless()
    133 : "r" (i), "r"(&v->counter) \
    152 : "r" (i), "r"(&v->counter) \
    173 : "r" (i), "r"(&v->counter) \

/arch/riscv/include/asm/
atomic.h:
    32  return READ_ONCE(v->counter);  in atomic_read()
    36  WRITE_ONCE(v->counter, i);  in atomic_set()
    43  return READ_ONCE(v->counter);  in atomic64_read()
    47  WRITE_ONCE(v->counter, i);  in atomic64_set()
    62  : "+A" (v->counter) \
    98  : "+A" (v->counter), "=r" (ret) \  in ATOMIC_OPS()
    109 : "+A" (v->counter), "=r" (ret) \
    214 : [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)
    235 : [p]"=&r" (prev), [rc]"=&r" (rc), [c]"+A" (v->counter)  in atomic64_fetch_add_unless()
    251 return __xchg_relaxed(&(v->counter), n, size); \
    [all …]
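
The riscv file generates _relaxed, _acquire, and _release variants of each operation by
setting the AMO instruction's .aq/.rl bits. In C11 terms those correspond roughly to
memory_order arguments on the same atomic op; a minimal illustration:

    #include <stdatomic.h>
    #include <stdio.h>

    int main(void)
    {
        atomic_int v = 0;

        /* No ordering: just the atomic RMW (an amoadd.w with neither bit set). */
        atomic_fetch_add_explicit(&v, 1, memory_order_relaxed);

        /* Acquire: later accesses may not be reordered before this op (.aq). */
        atomic_fetch_add_explicit(&v, 1, memory_order_acquire);

        /* Release: earlier accesses may not be reordered after this op (.rl). */
        atomic_fetch_add_explicit(&v, 1, memory_order_release);

        printf("%d\n", atomic_load(&v));  /* 3 */
        return 0;
    }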

/arch/sparc/lib/
atomic32.c:
    38  ret = v->counter; \
    39  v->counter c_op i; \
    53  ret = (v->counter c_op i); \
    76  ret = v->counter;  in atomic_xchg()
    77  v->counter = new;  in atomic_xchg()
    89  ret = v->counter;  in atomic_cmpxchg()
    91  v->counter = new;  in atomic_cmpxchg()
    104 ret = v->counter;  in atomic_fetch_add_unless()
    106 v->counter += a;  in atomic_fetch_add_unless()
    118 v->counter = i;  in atomic_set()
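
sparc32 lacks usable atomic read-modify-write instructions, so atomic32.c makes the plain C
updates above atomic by wrapping them in a spinlock (hashed on the counter's address via
ATOMIC_HASH in the real file). A pthread rendition of the same idea, with one global lock
standing in for the hashed array:

    #include <pthread.h>
    #include <stdio.h>

    typedef struct { int counter; } atomic_sketch_t;

    static pthread_mutex_t atomic_lock = PTHREAD_MUTEX_INITIALIZER;

    static int atomic_add_return_sketch(int i, atomic_sketch_t *v)
    {
        int ret;

        pthread_mutex_lock(&atomic_lock);
        ret = (v->counter += i);   /* plain RMW, made atomic by the lock */
        pthread_mutex_unlock(&atomic_lock);
        return ret;
    }

    static int atomic_cmpxchg_sketch(atomic_sketch_t *v, int old, int new_val)
    {
        int ret;

        pthread_mutex_lock(&atomic_lock);
        ret = v->counter;
        if (ret == old)
            v->counter = new_val;
        pthread_mutex_unlock(&atomic_lock);
        return ret;
    }

    int main(void)
    {
        atomic_sketch_t v = { 0 };

        atomic_add_return_sketch(5, &v);
        atomic_cmpxchg_sketch(&v, 5, 42);
        printf("%d\n", v.counter);  /* 42 */
        return 0;
    }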